diff --git a/dist/index.js b/dist/index.js index f5ea0e7..c93bd2d 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,5 +1,4 @@ -require('./sourcemap-register.js');module.exports = -/******/ (() => { // webpackBootstrap +require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ /***/ 7171: @@ -9,7 +8,11 @@ require('./sourcemap-register.js');module.exports = var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -26,6 +29,15 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; @@ -67,61 +79,62 @@ class ArtifactProvider { this.artifactNameMatch = (str) => str === this.artifact; this.getReportName = () => this.name; } - this.fileNameMatch = picomatch_1.default(pattern); + this.fileNameMatch = (0, picomatch_1.default)(pattern); } - async load() { - const result = {}; - const resp = await this.octokit.actions.listWorkflowRunArtifacts({ - ...github.context.repo, - run_id: this.runId + load() { + return __awaiter(this, void 0, void 0, function* () { + const result = {}; + const resp = yield this.octokit.rest.actions.listWorkflowRunArtifacts(Object.assign(Object.assign({}, github.context.repo), { run_id: this.runId })); + if (resp.data.artifacts.length === 0) { + core.warning(`No artifacts found in run ${this.runId}`); + return {}; + } + const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name)); + if (artifacts.length === 0) { + core.warning(`No artifact matches ${this.artifact}`); + return {}; + } + for (const art of artifacts) { + const fileName = `${art.name}.zip`; + yield (0, github_utils_1.downloadArtifact)(this.octokit, art.id, fileName, this.token); + core.startGroup(`Reading archive ${fileName}`); + try { + const reportName = this.getReportName(art.name); + core.info(`Report name: ${reportName}`); + const files = []; + const zip = new adm_zip_1.default(fileName); + for (const entry of zip.getEntries()) { + const file = entry.entryName; + if (entry.isDirectory) { + core.info(`Skipping ${file}: entry is a directory`); + continue; + } + if (!this.fileNameMatch(file)) { + core.info(`Skipping ${file}: filename does not match pattern`); + continue; + } + const content = zip.readAsText(entry); + files.push({ file, content }); + core.info(`Read ${file}: ${content.length} chars`); + } + if (result[reportName]) { + result[reportName].push(...files); + } + else { + result[reportName] = files; + } + } + finally { + core.endGroup(); + } + } + return result; }); - if (resp.data.artifacts.length === 0) { - core.warning(`No artifacts found in run ${this.runId}`); - return {}; - } - const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name)); - if (artifacts.length === 0) { - core.warning(`No artifact matches ${this.artifact}`); - return {}; - } - for (const art of artifacts) { - const fileName = `${art.name}.zip`; - await github_utils_1.downloadArtifact(this.octokit, art.id, fileName, this.token); - core.startGroup(`Reading archive ${fileName}`); - try { - const reportName = this.getReportName(art.name); - core.info(`Report name: ${reportName}`); - const files = []; - const zip = new adm_zip_1.default(fileName); - for (const entry of zip.getEntries()) { - const file = entry.entryName; - if (entry.isDirectory) { - core.info(`Skipping ${file}: entry is a directory`); - continue; - } - if (!this.fileNameMatch(file)) { - core.info(`Skipping ${file}: filename does not match pattern`); - continue; - } - const content = zip.readAsText(entry); - files.push({ file, content }); - core.info(`Read ${file}: ${content.length} chars`); - } - if (result[reportName]) { - result[reportName].push(...files); - } - else { - result[reportName] = files; - } - } - finally { - core.endGroup(); - } - } - return result; } - async listTrackedFiles() { - return github_utils_1.listFiles(this.octokit, this.sha); + listTrackedFiles() { + return __awaiter(this, void 0, void 0, function* () { + return (0, github_utils_1.listFiles)(this.octokit, this.sha); + }); } } exports.ArtifactProvider = ArtifactProvider; @@ -136,7 +149,11 @@ 
exports.ArtifactProvider = ArtifactProvider; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -153,12 +170,21 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LocalFileProvider = void 0; -const fs = __importStar(__nccwpck_require__(5747)); +const fs = __importStar(__nccwpck_require__(7147)); const fast_glob_1 = __importDefault(__nccwpck_require__(3664)); const git_1 = __nccwpck_require__(9844); class LocalFileProvider { @@ -166,19 +192,23 @@ class LocalFileProvider { this.name = name; this.pattern = pattern; } - async load() { - const result = []; - for (const pat of this.pattern) { - const paths = await fast_glob_1.default(pat, { dot: true }); - for (const file of paths) { - const content = await fs.promises.readFile(file, { encoding: 'utf8' }); - result.push({ file, content }); + load() { + return __awaiter(this, void 0, void 0, function* () { + const result = []; + for (const pat of this.pattern) { + const paths = yield (0, fast_glob_1.default)(pat, { dot: true }); + for (const file of paths) { + const content = yield fs.promises.readFile(file, { encoding: 'utf8' }); + result.push({ file, content }); + } } - } - return { [this.name]: result }; + return { [this.name]: result }; + }); } - async listTrackedFiles() { - return git_1.listFiles(); + listTrackedFiles() { + return __awaiter(this, void 0, void 0, function* () { + return (0, git_1.listFiles)(); + }); } } exports.LocalFileProvider = LocalFileProvider; @@ -193,7 +223,11 @@ exports.LocalFileProvider = LocalFileProvider; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -210,6 +244,15 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); @@ -225,14 +268,19 @@ const mocha_json_parser_1 = __nccwpck_require__(6043); const path_utils_1 = __nccwpck_require__(4070); const github_utils_1 = __nccwpck_require__(3522); const markdown_utils_1 = __nccwpck_require__(6482); -async function main() { - try { - const testReporter = new TestReporter(); - await testReporter.run(); - } - catch (error) { - core.setFailed(error.message); - } +function main() { + return __awaiter(this, void 0, void 0, function* () { + try { + const testReporter = new TestReporter(); + yield testReporter.run(); + } + catch (error) { + if (error instanceof Error) + core.setFailed(error); + else + core.setFailed(JSON.stringify(error)); + } + }); } class TestReporter { constructor() { @@ -248,7 +296,7 @@ class TestReporter { this.workDirInput = core.getInput('working-directory', { required: false }); this.onlySummary = core.getInput('only-summary', { required: false }) === 'true'; this.token = core.getInput('token', { required: true }); - this.context = github_utils_1.getCheckRunContext(); + this.context = (0, github_utils_1.getCheckRunContext)(); this.octokit = github.getOctokit(this.token); if (this.listSuites !== 'all' && this.listSuites !== 'failed') { core.setFailed(`Input parameter 'list-suites' has invalid value`); @@ -263,109 +311,101 @@ class TestReporter { return; } } - async run() { - if (this.workDirInput) { - core.info(`Changing directory to '${this.workDirInput}'`); - process.chdir(this.workDirInput); - } - core.info(`Check runs will be created with SHA=${this.context.sha}`); - // Split path pattern by ',' and optionally convert all backslashes to forward slashes - // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators - const pathsList = this.path.split(','); - const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList; - const inputProvider = this.artifact - ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token) - : new local_file_provider_1.LocalFileProvider(this.name, pattern); - const parseErrors = this.maxAnnotations > 0; - const trackedFiles = await inputProvider.listTrackedFiles(); - const workDir = this.artifact ? 
undefined : path_utils_1.normalizeDirPath(process.cwd(), true); - core.info(`Found ${trackedFiles.length} files tracked by GitHub`); - const options = { - workDir, - trackedFiles, - parseErrors - }; - core.info(`Using test report parser '${this.reporter}'`); - const parser = this.getParser(this.reporter, options); - const results = []; - const input = await inputProvider.load(); - for (const [reportName, files] of Object.entries(input)) { - try { - core.startGroup(`Creating test report ${reportName}`); - const tr = await this.createReport(parser, reportName, files); - results.push(...tr); + run() { + return __awaiter(this, void 0, void 0, function* () { + if (this.workDirInput) { + core.info(`Changing directory to '${this.workDirInput}'`); + process.chdir(this.workDirInput); } - finally { - core.endGroup(); + core.info(`Check runs will be created with SHA=${this.context.sha}`); + // Split path pattern by ',' and optionally convert all backslashes to forward slashes + // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators + const pathsList = this.path.split(','); + const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList; + const inputProvider = this.artifact + ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token) + : new local_file_provider_1.LocalFileProvider(this.name, pattern); + const parseErrors = this.maxAnnotations > 0; + const trackedFiles = yield inputProvider.listTrackedFiles(); + const workDir = this.artifact ? undefined : (0, path_utils_1.normalizeDirPath)(process.cwd(), true); + core.info(`Found ${trackedFiles.length} files tracked by GitHub`); + const options = { + workDir, + trackedFiles, + parseErrors + }; + core.info(`Using test report parser '${this.reporter}'`); + const parser = this.getParser(this.reporter, options); + const results = []; + const input = yield inputProvider.load(); + for (const [reportName, files] of Object.entries(input)) { + try { + core.startGroup(`Creating test report ${reportName}`); + const tr = yield this.createReport(parser, reportName, files); + results.push(...tr); + } + finally { + core.endGroup(); + } } - } - const isFailed = results.some(tr => tr.result === 'failed'); - const conclusion = isFailed ? 'failure' : 'success'; - const passed = results.reduce((sum, tr) => sum + tr.passed, 0); - const failed = results.reduce((sum, tr) => sum + tr.failed, 0); - const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); - const time = results.reduce((sum, tr) => sum + tr.time, 0); - core.setOutput('conclusion', conclusion); - core.setOutput('passed', passed); - core.setOutput('failed', failed); - core.setOutput('skipped', skipped); - core.setOutput('time', time); - if (this.failOnError && isFailed) { - core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`); - return; - } - if (results.length === 0) { - core.setFailed(`No test report files were found`); - return; - } + const isFailed = results.some(tr => tr.result === 'failed'); + const conclusion = isFailed ? 
'failure' : 'success'; + const passed = results.reduce((sum, tr) => sum + tr.passed, 0); + const failed = results.reduce((sum, tr) => sum + tr.failed, 0); + const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); + const time = results.reduce((sum, tr) => sum + tr.time, 0); + core.setOutput('conclusion', conclusion); + core.setOutput('passed', passed); + core.setOutput('failed', failed); + core.setOutput('skipped', skipped); + core.setOutput('time', time); + if (this.failOnError && isFailed) { + core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`); + return; + } + if (results.length === 0) { + core.setFailed(`No test report files were found`); + return; + } + }); } - async createReport(parser, name, files) { - if (files.length === 0) { - core.warning(`No file matches path ${this.path}`); - return []; - } - const results = []; - for (const { file, content } of files) { - core.info(`Processing test results from ${file}`); - const tr = await parser.parse(file, content); - results.push(tr); - } - core.info(`Creating check run ${name}`); - const createResp = await this.octokit.checks.create({ - head_sha: this.context.sha, - name, - status: 'in_progress', - output: { - title: name, - summary: '' - }, - ...github.context.repo + createReport(parser, name, files) { + return __awaiter(this, void 0, void 0, function* () { + if (files.length === 0) { + core.warning(`No file matches path ${this.path}`); + return []; + } + const results = []; + for (const { file, content } of files) { + core.info(`Processing test results from ${file}`); + const tr = yield parser.parse(file, content); + results.push(tr); + } + core.info(`Creating check run ${name}`); + const createResp = yield this.octokit.rest.checks.create(Object.assign({ head_sha: this.context.sha, name, status: 'in_progress', output: { + title: name, + summary: '' + } }, github.context.repo)); + core.info('Creating report summary'); + const { listSuites, listTests, onlySummary } = this; + const baseUrl = createResp.data.html_url; + const summary = (0, get_report_1.getReport)(results, { listSuites, listTests, baseUrl, onlySummary }); + core.info('Creating annotations'); + const annotations = (0, get_annotations_1.getAnnotations)(results, this.maxAnnotations); + const isFailed = results.some(tr => tr.result === 'failed'); + const conclusion = isFailed ? 'failure' : 'success'; + const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success; + core.info(`Updating check run conclusion (${conclusion}) and output`); + const resp = yield this.octokit.rest.checks.update(Object.assign({ check_run_id: createResp.data.id, conclusion, status: 'completed', output: { + title: `${name} ${icon}`, + summary, + annotations + } }, github.context.repo)); + core.info(`Check run create response: ${resp.status}`); + core.info(`Check run URL: ${resp.data.url}`); + core.info(`Check run HTML: ${resp.data.html_url}`); + return results; }); - core.info('Creating report summary'); - const { listSuites, listTests, onlySummary } = this; - const baseUrl = createResp.data.html_url; - const summary = get_report_1.getReport(results, { listSuites, listTests, baseUrl, onlySummary }); - core.info('Creating annotations'); - const annotations = get_annotations_1.getAnnotations(results, this.maxAnnotations); - const isFailed = results.some(tr => tr.result === 'failed'); - const conclusion = isFailed ? 'failure' : 'success'; - const icon = isFailed ? 
markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success; - core.info(`Updating check run conclusion (${conclusion}) and output`); - const resp = await this.octokit.checks.update({ - check_run_id: createResp.data.id, - conclusion, - status: 'completed', - output: { - title: `${name} ${icon}`, - summary, - annotations - }, - ...github.context.repo - }); - core.info(`Check run create response: ${resp.status}`); - core.info(`Check run URL: ${resp.data.url}`); - core.info(`Check run HTML: ${resp.data.html_url}`); - return results; } getParser(reporter, options) { switch (reporter) { @@ -392,10 +432,19 @@ main(); /***/ }), /***/ 4528: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DartJsonParser = void 0; const path_utils_1 = __nccwpck_require__(4070); @@ -449,10 +498,12 @@ class DartJsonParser { this.options = options; this.sdk = sdk; } - async parse(path, content) { - const tr = this.getTestRun(path, content); - const result = this.getTestRunResult(tr); - return Promise.resolve(result); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const tr = this.getTestRun(path, content); + const result = this.getTestRunResult(tr); + return Promise.resolve(result); + }); } getTestRun(path, content) { const lines = content.split(/\n\r?/g); @@ -465,7 +516,8 @@ class DartJsonParser { return JSON.parse(str); } catch (e) { - const col = e.columnNumber !== undefined ? `:${e.columnNumber}` : ''; + const errWithCol = e; + const col = errWithCol.columnNumber !== undefined ? 
`:${errWithCol.columnNumber}` : ''; throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\n\n${e}`); } }) @@ -475,29 +527,29 @@ class DartJsonParser { const suites = {}; const tests = {}; for (const evt of events) { - if (dart_json_types_1.isSuiteEvent(evt)) { + if ((0, dart_json_types_1.isSuiteEvent)(evt)) { suites[evt.suite.id] = new TestSuite(evt.suite); } - else if (dart_json_types_1.isGroupEvent(evt)) { + else if ((0, dart_json_types_1.isGroupEvent)(evt)) { suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group); } - else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) { + else if ((0, dart_json_types_1.isTestStartEvent)(evt) && evt.test.url !== null) { const test = new TestCase(evt); const suite = suites[evt.test.suiteID]; const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]]; group.tests.push(test); tests[evt.test.id] = test; } - else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden && tests[evt.testID]) { + else if ((0, dart_json_types_1.isTestDoneEvent)(evt) && !evt.hidden && tests[evt.testID]) { tests[evt.testID].testDone = evt; } - else if (dart_json_types_1.isErrorEvent(evt) && tests[evt.testID]) { + else if ((0, dart_json_types_1.isErrorEvent)(evt) && tests[evt.testID]) { tests[evt.testID].error = evt; } - else if (dart_json_types_1.isMessageEvent(evt) && tests[evt.testID]) { + else if ((0, dart_json_types_1.isMessageEvent)(evt) && tests[evt.testID]) { tests[evt.testID].print.push(evt); } - else if (dart_json_types_1.isDoneEvent(evt)) { + else if ((0, dart_json_types_1.isDoneEvent)(evt)) { success = evt.success; totalTime = evt.time; } @@ -582,7 +634,7 @@ class DartJsonParser { const match = str.match(re); if (match !== null) { const [_, pathStr, lineStr] = match; - const path = path_utils_1.normalizeFilePath(this.getRelativePath(pathStr)); + const path = (0, path_utils_1.normalizeFilePath)(this.getRelativePath(pathStr)); if (trackedFiles.includes(path)) { const line = parseInt(lineStr); return { path, line }; @@ -595,7 +647,7 @@ class DartJsonParser { if (path.startsWith(prefix)) { path = path.substr(prefix.length); } - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -604,7 +656,7 @@ class DartJsonParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DartJsonParser = DartJsonParser; @@ -653,10 +705,19 @@ exports.isMessageEvent = isMessageEvent; /***/ }), /***/ 2664: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DotnetTrxParser = void 0; const xml2js_1 = __nccwpck_require__(6189); @@ -691,20 +752,24 @@ class DotnetTrxParser { constructor(options) { this.options = options; } - async parse(path, content) { - const trx = await this.getTrxReport(path, content); - const tc = this.getTestClasses(trx); - const tr = this.getTestRunResult(path, trx, tc); - tr.sort(true); - return tr; + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const trx = yield this.getTrxReport(path, content); + const tc = this.getTestClasses(trx); + const tr = this.getTestRunResult(path, trx, tc); + tr.sort(true); + return tr; + }); } - async getTrxReport(path, content) { - try { - return (await xml2js_1.parseStringPromise(content)); - } - catch (e) { - throw new Error(`Invalid XML at ${path}\n\n${e}`); - } + getTrxReport(path, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return (yield (0, xml2js_1.parseStringPromise)(content)); + } + catch (e) { + throw new Error(`Invalid XML at ${path}\n\n${e}`); + } + }); } getTestClasses(trx) { if (trx.TestRun.TestDefinitions === undefined || trx.TestRun.Results === undefined) { @@ -730,7 +795,7 @@ class DotnetTrxParser { } const error = this.getErrorInfo(r.result); const durationAttr = r.result.$.duration; - const duration = durationAttr ? parse_utils_1.parseNetDuration(durationAttr) : 0; + const duration = durationAttr ? (0, parse_utils_1.parseNetDuration)(durationAttr) : 0; const resultTestName = r.result.$.testName; const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.' ? resultTestName.substr(className.length + 1) @@ -743,7 +808,7 @@ class DotnetTrxParser { } getTestRunResult(path, trx, testClasses) { const times = trx.TestRun.Times[0].$; - const totalTime = parse_utils_1.parseIsoDate(times.finish).getTime() - parse_utils_1.parseIsoDate(times.start).getTime(); + const totalTime = (0, parse_utils_1.parseIsoDate)(times.finish).getTime() - (0, parse_utils_1.parseIsoDate)(times.start).getTime(); const suites = testClasses.map(testClass => { const tests = testClass.tests.map(test => { const error = this.getError(test); @@ -798,7 +863,7 @@ class DotnetTrxParser { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; - const filePath = path_utils_1.normalizeFilePath(fileStr); + const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); const workDir = this.getWorkDir(filePath); if (workDir) { const file = filePath.substr(workDir.length); @@ -812,7 +877,7 @@ class DotnetTrxParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DotnetTrxParser = DotnetTrxParser; @@ -827,7 +892,11 @@ exports.DotnetTrxParser = DotnetTrxParser; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -844,9 +913,18 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JavaJunitParser = void 0; -const path = __importStar(__nccwpck_require__(5622)); +const path = __importStar(__nccwpck_require__(1017)); const xml2js_1 = __nccwpck_require__(6189); const path_utils_1 = __nccwpck_require__(4070); const test_results_1 = __nccwpck_require__(2768); @@ -859,38 +937,42 @@ class JavaJunitParser { for (const filePath of options.trackedFiles) { const fileName = path.basename(filePath); const files = (_a = this.trackedFiles[fileName]) !== null && _a !== void 0 ? 
_a : (this.trackedFiles[fileName] = []); - files.push(path_utils_1.normalizeFilePath(filePath)); + files.push((0, path_utils_1.normalizeFilePath)(filePath)); } } - async parse(filePath, content) { - const reportOrSuite = await this.getJunitReport(filePath, content); - const isReport = reportOrSuite.testsuites !== undefined; - // XML might contain: - // - multiple suites under root node - // - single as root node - let ju; - if (isReport) { - ju = reportOrSuite; - } - else { - // Make it behave the same way as if suite was inside root node - const suite = reportOrSuite.testsuite; - ju = { - testsuites: { - $: { time: suite.$.time }, - testsuite: [suite] - } - }; - } - return this.getTestRunResult(filePath, ju); + parse(filePath, content) { + return __awaiter(this, void 0, void 0, function* () { + const reportOrSuite = yield this.getJunitReport(filePath, content); + const isReport = reportOrSuite.testsuites !== undefined; + // XML might contain: + // - multiple suites under root node + // - single as root node + let ju; + if (isReport) { + ju = reportOrSuite; + } + else { + // Make it behave the same way as if suite was inside root node + const suite = reportOrSuite.testsuite; + ju = { + testsuites: { + $: { time: suite.$.time }, + testsuite: [suite] + } + }; + } + return this.getTestRunResult(filePath, ju); + }); } - async getJunitReport(filePath, content) { - try { - return await xml2js_1.parseStringPromise(content); - } - catch (e) { - throw new Error(`Invalid XML at ${filePath}\n\n${e}`); - } + getJunitReport(filePath, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return yield (0, xml2js_1.parseStringPromise)(content); + } + catch (e) { + throw new Error(`Invalid XML at ${filePath}\n\n${e}`); + } + }); } getTestRunResult(filePath, junit) { var _a; @@ -1029,10 +1111,19 @@ exports.JavaJunitParser = JavaJunitParser; /***/ }), /***/ 1113: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JestJunitParser = void 0; const xml2js_1 = __nccwpck_require__(6189); @@ -1043,17 +1134,21 @@ class JestJunitParser { constructor(options) { this.options = options; } - async parse(path, content) { - const ju = await this.getJunitReport(path, content); - return this.getTestRunResult(path, ju); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const ju = yield this.getJunitReport(path, content); + return this.getTestRunResult(path, ju); + }); } - async getJunitReport(path, content) { - try { - return (await xml2js_1.parseStringPromise(content)); - } - catch (e) { - throw new Error(`Invalid XML at ${path}\n\n${e}`); - } + getJunitReport(path, content) { + return __awaiter(this, void 0, void 0, function* () { + try { + return (yield (0, xml2js_1.parseStringPromise)(content)); + } + catch (e) { + throw new Error(`Invalid XML at ${path}\n\n${e}`); + } + }); } getTestRunResult(path, junit) { const suites = junit.testsuites.testsuite === undefined @@ -1102,7 +1197,7 @@ class JestJunitParser { const details = tc.failure[0]; let path; let line; - const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file)); + const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; @@ -1114,7 +1209,7 @@ class JestJunitParser { }; } getRelativePath(path) { - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -1123,7 +1218,7 @@ class JestJunitParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.JestJunitParser = JestJunitParser; @@ -1132,10 +1227,19 @@ exports.JestJunitParser = JestJunitParser; /***/ }), /***/ 6043: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MochaJsonParser = void 0; const test_results_1 = __nccwpck_require__(2768); @@ -1145,11 +1249,13 @@ class MochaJsonParser { constructor(options) { this.options = options; } - async parse(path, content) { - const mocha = this.getMochaJson(path, content); - const result = this.getTestRunResult(path, mocha); - result.sort(true); - return Promise.resolve(result); + parse(path, content) { + return __awaiter(this, void 0, void 0, function* () { + const mocha = this.getMochaJson(path, content); + const result = this.getTestRunResult(path, mocha); + result.sort(true); + return Promise.resolve(result); + }); } getMochaJson(path, content) { try { @@ -1203,7 +1309,7 @@ class MochaJsonParser { } let path; let line; - const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file)); + const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; @@ -1216,7 +1322,7 @@ class MochaJsonParser { }; } getRelativePath(path) { - path = path_utils_1.normalizeFilePath(path); + path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); @@ -1225,7 +1331,7 @@ class MochaJsonParser { } getWorkDir(path) { var _a, _b; - return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles))); + return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.MochaJsonParser = MochaJsonParser; @@ -1273,7 +1379,7 @@ function getAnnotations(results, maxCount) { suiteName: ts.name, testName: tg.name ? `${tg.name} ► ${tc.name}` : tc.name, details: err.details, - message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : parse_utils_1.getFirstNonEmptyLine(err.details)) !== null && _d !== void 0 ? _d : 'Test failed', + message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : (0, parse_utils_1.getFirstNonEmptyLine)(err.details)) !== null && _d !== void 0 ? 
_d : 'Test failed', path, line }); @@ -1288,7 +1394,7 @@ function getAnnotations(results, maxCount) { 'Failed test found in:', e.testRunPaths.map(p => ` ${p}`).join('\n'), 'Error:', - ident(markdown_utils_1.fixEol(e.message), ' ') + ident((0, markdown_utils_1.fixEol)(e.message), ' ') ].join('\n'); return enforceCheckRunLimits({ path: e.path, @@ -1296,7 +1402,7 @@ function getAnnotations(results, maxCount) { end_line: e.line, annotation_level: 'failure', title: `${e.suiteName} ► ${e.testName}`, - raw_details: markdown_utils_1.fixEol(e.details), + raw_details: (0, markdown_utils_1.fixEol)(e.details), message }); }); @@ -1304,10 +1410,10 @@ function getAnnotations(results, maxCount) { } exports.getAnnotations = getAnnotations; function enforceCheckRunLimits(err) { - err.title = markdown_utils_1.ellipsis(err.title || '', 255); - err.message = markdown_utils_1.ellipsis(err.message, 65535); + err.title = (0, markdown_utils_1.ellipsis)(err.title || '', 255); + err.message = (0, markdown_utils_1.ellipsis)(err.message, 65535); if (err.raw_details) { - err.raw_details = markdown_utils_1.ellipsis(err.raw_details, 65535); + err.raw_details = (0, markdown_utils_1.ellipsis)(err.raw_details, 65535); } return err; } @@ -1328,7 +1434,11 @@ function ident(text, prefix) { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1349,6 +1459,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getReport = void 0; const core = __importStar(__nccwpck_require__(2186)); const markdown_utils_1 = __nccwpck_require__(6482); +const node_utils_1 = __nccwpck_require__(5824); const parse_utils_1 = __nccwpck_require__(7811); const slugger_1 = __nccwpck_require__(3328); const MAX_REPORT_LENGTH = 65535; @@ -1361,7 +1472,7 @@ const defaultOptions = { function getReport(results, options = defaultOptions) { core.info('Generating check run summary'); applySort(results); - const opts = { ...options }; + const opts = Object.assign({}, options); let lines = renderReport(results, opts); let report = lines.join('\n'); if (getByteLength(report) <= MAX_REPORT_LENGTH) { @@ -1407,9 +1518,9 @@ function trimReport(lines) { return reportLines.join('\n'); } function applySort(results) { - results.sort((a, b) => a.path.localeCompare(b.path)); + results.sort((a, b) => a.path.localeCompare(b.path, node_utils_1.DEFAULT_LOCALE)); for (const res of results) { - res.suites.sort((a, b) => a.name.localeCompare(b.name)); + res.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } function getByteLength(text) { @@ -1456,16 +1567,16 @@ function getTestRunsReport(testRuns, options) { const sections = []; if (testRuns.length > 1 || options.onlySummary) { const tableData = testRuns.map((tr, runIndex) => { - const time = markdown_utils_1.formatTime(tr.time); + const time = (0, markdown_utils_1.formatTime)(tr.time); const name = tr.path; const addr = options.baseUrl + makeRunSlug(runIndex).link; - const nameLink = markdown_utils_1.link(name, addr); + const nameLink = (0, markdown_utils_1.link)(name, addr); const passed = 
tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : ''; const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = tr.skipped > 0 ? `${tr.skipped}${markdown_utils_1.Icon.skip}` : ''; return [nameLink, passed, failed, skipped, time]; }); - const resultsTable = markdown_utils_1.table(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData); + const resultsTable = (0, markdown_utils_1.table)(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData); sections.push(resultsTable); } if (options.onlySummary === false) { @@ -1480,19 +1591,19 @@ function getSuitesReport(tr, runIndex, options) { const nameLink = `${tr.path}`; const icon = getResultIcon(tr.result); sections.push(`## ${icon}\xa0${nameLink}`); - const time = markdown_utils_1.formatTime(tr.time); + const time = (0, markdown_utils_1.formatTime)(tr.time); const headingLine2 = tr.tests > 0 ? `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.` : 'No tests found'; sections.push(headingLine2); const suites = options.listSuites === 'failed' ? tr.failedSuites : tr.suites; if (suites.length > 0) { - const suitesTable = markdown_utils_1.table(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => { - const tsTime = markdown_utils_1.formatTime(s.time); + const suitesTable = (0, markdown_utils_1.table)(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => { + const tsTime = (0, markdown_utils_1.formatTime)(s.time); const tsName = s.name; const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed'); const tsAddr = options.baseUrl + makeSuiteSlug(runIndex, suiteIndex).link; - const tsNameLink = skipLink ? tsName : markdown_utils_1.link(tsName, tsAddr); + const tsNameLink = skipLink ? tsName : (0, markdown_utils_1.link)(tsName, tsAddr); const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : ''; const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = s.skipped > 0 ? `${s.skipped}${markdown_utils_1.Icon.skip}` : ''; @@ -1533,7 +1644,7 @@ function getTestsReport(ts, runIndex, suiteIndex, options) { const result = getResultIcon(tc.result); sections.push(`${space}${result} ${tc.name}`); if (tc.error) { - const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = parse_utils_1.getFirstNonEmptyLine(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? void 0 : _c.split(/\r?\n/g).map(l => '\t' + l); + const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = (0, parse_utils_1.getFirstNonEmptyLine)(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? 
void 0 : _c.split(/\r?\n/g).map(l => '\t' + l); if (lines) { sections.push(...lines); } @@ -1545,11 +1656,11 @@ function getTestsReport(ts, runIndex, suiteIndex, options) { } function makeRunSlug(runIndex) { // use prefix to avoid slug conflicts after escaping the paths - return slugger_1.slug(`r${runIndex}`); + return (0, slugger_1.slug)(`r${runIndex}`); } function makeSuiteSlug(runIndex, suiteIndex) { // use prefix to avoid slug conflicts after escaping the paths - return slugger_1.slug(`r${runIndex}s${suiteIndex}`); + return (0, slugger_1.slug)(`r${runIndex}s${suiteIndex}`); } function getResultIcon(result) { switch (result) { @@ -1568,12 +1679,13 @@ function getResultIcon(result) { /***/ }), /***/ 2768: -/***/ ((__unused_webpack_module, exports) => { +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0; +const node_utils_1 = __nccwpck_require__(5824); class TestRunResult { constructor(path, suites, totalTime) { this.path = path; @@ -1603,7 +1715,7 @@ class TestRunResult { return this.suites.filter(s => s.result === 'failed'); } sort(deep) { - this.suites.sort((a, b) => a.name.localeCompare(b.name)); + this.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); if (deep) { for (const suite of this.suites) { suite.sort(deep); @@ -1641,7 +1753,7 @@ class TestSuiteResult { return this.groups.filter(grp => grp.result === 'failed'); } sort(deep) { - this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : ''); }); + this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : '', node_utils_1.DEFAULT_LOCALE); }); if (deep) { for (const grp of this.groups) { grp.sort(); @@ -1674,7 +1786,7 @@ class TestGroupResult { return this.tests.filter(tc => tc.result === 'failed'); } sort() { - this.tests.sort((a, b) => a.name.localeCompare(b.name)); + this.tests.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } exports.TestGroupResult = TestGroupResult; @@ -1692,26 +1804,37 @@ exports.TestCaseResult = TestCaseResult; /***/ }), /***/ 6069: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); const exec_1 = __nccwpck_require__(1514); // Wraps original exec() function // Returns exit code and whole stdout/stderr -async function exec(commandLine, args, options) { - options = options || {}; - let stdout = ''; - let stderr = ''; - options.listeners = { - stdout: (data) => (stdout += data.toString()), - stderr: (data) => (stderr += data.toString()) - }; - const code = await exec_1.exec(commandLine, args, options); - return { code, stdout, stderr }; +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + options = options || {}; + let stdout = ''; + let stderr = ''; + options.listeners = { + stdout: (data) => (stdout += data.toString()), + stderr: (data) => (stderr += data.toString()) + }; + const code = yield (0, exec_1.exec)(commandLine, args, options); + return { code, stdout, stderr }; + }); } -exports.default = exec; +exports["default"] = exec; /***/ }), @@ -1723,7 +1846,11 @@ exports.default = exec; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1740,6 +1867,15 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; @@ -1747,17 +1883,19 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = void 0; const core = __importStar(__nccwpck_require__(2186)); const exec_1 = __importDefault(__nccwpck_require__(6069)); -async function listFiles() { - core.startGroup('Listing all files tracked by git'); - let output = ''; - try { - output = (await exec_1.default('git', ['ls-files', '-z'])).stdout; - } - finally { - fixStdOutNullTermination(); - core.endGroup(); - } - return output.split('\u0000').filter(s => s.length > 0); +function listFiles() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Listing all files tracked by git'); + let output = ''; + try { + output = (yield (0, exec_1.default)('git', ['ls-files', '-z'])).stdout; + } + finally { + fixStdOutNullTermination(); + core.endGroup(); + } + return output.split('\u0000').filter(s => s.length > 0); + }); } exports.listFiles = listFiles; function fixStdOutNullTermination() { @@ -1777,7 +1915,11 @@ function fixStdOutNullTermination() { var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1794,18 +1936,24 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0; -const fs_1 = __nccwpck_require__(5747); +const fs_1 = __nccwpck_require__(7147); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); -const stream = __importStar(__nccwpck_require__(2413)); -const util_1 = __nccwpck_require__(1669); -const got_1 = __importDefault(__nccwpck_require__(3061)); -const asyncStream = util_1.promisify(stream.pipeline); +const stream = __importStar(__nccwpck_require__(2781)); +const util_1 = __nccwpck_require__(3837); +const got_1 = __nccwpck_require__(9859); +const asyncStream = (0, util_1.promisify)(stream.pipeline); function getCheckRunContext() { if (github.context.eventName === 'workflow_run') { core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow'); @@ -1827,91 +1975,83 @@ function getCheckRunContext() { return { sha: github.context.sha, runId }; } exports.getCheckRunContext = getCheckRunContext; -async function downloadArtifact(octokit, artifactId, fileName, token) { - core.startGroup(`Downloading artifact ${fileName}`); - try { - core.info(`Artifact ID: ${artifactId}`); - const req = octokit.actions.downloadArtifact.endpoint({ - ...github.context.repo, - artifact_id: artifactId, - archive_format: 'zip' - }); - const headers = { - Authorization: `Bearer ${token}` - }; - const resp = await got_1.default(req.url, { - headers, - followRedirect: false - }); - core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`); - if (resp.statusCode !== 302) { - throw new Error('Fetch artifact URL failed: received unexpected status code'); +function downloadArtifact(octokit, artifactId, fileName, token) { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup(`Downloading artifact ${fileName}`); + try { + core.info(`Artifact ID: ${artifactId}`); + const req = octokit.rest.actions.downloadArtifact.endpoint(Object.assign(Object.assign({}, github.context.repo), { artifact_id: artifactId, archive_format: 'zip' })); + const headers = { + Authorization: `Bearer ${token}` + }; + const resp = yield (0, got_1.got)(req.url, { + headers, + followRedirect: false + }); + core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`); + if (resp.statusCode !== 302) { + throw new Error('Fetch artifact URL failed: received unexpected status code'); + } + const url = resp.headers.location; + if (url === undefined) { + const receivedHeaders = Object.keys(resp.headers); + core.info(`Received headers: ${receivedHeaders.join(', ')}`); + throw new Error('Location header was not found in API response'); + } + if (typeof url !== 'string') { + throw new Error(`Location header has unexpected value: ${url}`); + } + const downloadStream = got_1.got.stream(url, { headers }); + const fileWriterStream = (0, fs_1.createWriteStream)(fileName); + core.info(`Downloading ${url}`); + downloadStream.on('downloadProgress', ({ transferred }) => { + core.info(`Progress: ${transferred} B`); + }); + yield asyncStream(downloadStream, fileWriterStream); } - const url = resp.headers.location; - if (url === undefined) { - const receivedHeaders = Object.keys(resp.headers); - core.info(`Received headers: ${receivedHeaders.join(', ')}`); - throw new Error('Location header was 
not found in API response'); + finally { + core.endGroup(); } - if (typeof url !== 'string') { - throw new Error(`Location header has unexpected value: ${url}`); - } - const downloadStream = got_1.default.stream(url, { headers }); - const fileWriterStream = fs_1.createWriteStream(fileName); - core.info(`Downloading ${url}`); - downloadStream.on('downloadProgress', ({ transferred }) => { - core.info(`Progress: ${transferred} B`); - }); - await asyncStream(downloadStream, fileWriterStream); - } - finally { - core.endGroup(); - } + }); } exports.downloadArtifact = downloadArtifact; -async function listFiles(octokit, sha) { - core.startGroup('Fetching list of tracked files from GitHub'); - try { - const commit = await octokit.git.getCommit({ - commit_sha: sha, - ...github.context.repo - }); - const files = await listGitTree(octokit, commit.data.tree.sha, ''); - return files; - } - finally { - core.endGroup(); - } +function listFiles(octokit, sha) { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Fetching list of tracked files from GitHub'); + try { + const commit = yield octokit.rest.git.getCommit(Object.assign({ commit_sha: sha }, github.context.repo)); + const files = yield listGitTree(octokit, commit.data.tree.sha, ''); + return files; + } + finally { + core.endGroup(); + } + }); } exports.listFiles = listFiles; -async function listGitTree(octokit, sha, path) { - const pathLog = path ? ` at ${path}` : ''; - core.info(`Fetching tree ${sha}${pathLog}`); - let truncated = false; - let tree = await octokit.git.getTree({ - recursive: 'true', - tree_sha: sha, - ...github.context.repo +function listGitTree(octokit, sha, path) { + return __awaiter(this, void 0, void 0, function* () { + const pathLog = path ? ` at ${path}` : ''; + core.info(`Fetching tree ${sha}${pathLog}`); + let truncated = false; + let tree = yield octokit.rest.git.getTree(Object.assign({ recursive: 'true', tree_sha: sha }, github.context.repo)); + if (tree.data.truncated) { + truncated = true; + tree = yield octokit.rest.git.getTree(Object.assign({ tree_sha: sha }, github.context.repo)); + } + const result = []; + for (const tr of tree.data.tree) { + const file = `${path}${tr.path}`; + if (tr.type === 'blob') { + result.push(file); + } + else if (tr.type === 'tree' && truncated) { + const files = yield listGitTree(octokit, tr.sha, `${file}/`); + result.push(...files); + } + } + return result; }); - if (tree.data.truncated) { - truncated = true; - tree = await octokit.git.getTree({ - tree_sha: sha, - ...github.context.repo - }); - } - const result = []; - for (const tr of tree.data.tree) { - const file = `${path}${tr.path}`; - if (tr.type === 'blob') { - result.push(file); - } - else if (tr.type === 'tree' && truncated) { - const files = await listGitTree(octokit, tr.sha, `${file}/`); - result.push(...files); - } - } - return result; } @@ -1980,8 +2120,9 @@ exports.formatTime = formatTime; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getExceptionSource = void 0; +exports.getExceptionSource = exports.DEFAULT_LOCALE = void 0; const path_utils_1 = __nccwpck_require__(4070); +exports.DEFAULT_LOCALE = 'en-US'; function getExceptionSource(stackTrace, trackedFiles, getRelativePath) { const lines = stackTrace.split(/\r?\n/); const re = /\((.*):(\d+):\d+\)$/; @@ -1989,7 +2130,7 @@ function getExceptionSource(stackTrace, trackedFiles, getRelativePath) { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; - const filePath = 
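// Editor's note: illustrative sketch, not part of the diff. It shows the same
// download pattern downloadArtifact() uses above: take the resolved redirect
// URL and stream it to disk through a promisified pipeline. `url`, `token` and
// `fileName` are placeholders, and this assumes a `got` build that can be
// required from CommonJS (as in this bundle).
const { createWriteStream } = require('fs');
const { promisify } = require('util');
const { pipeline } = require('stream');
const { got } = require('got');
const asyncPipeline = promisify(pipeline);
async function saveUrlToFile(url, token, fileName) {
  const headers = { Authorization: `Bearer ${token}` };
  await asyncPipeline(got.stream(url, { headers }), createWriteStream(fileName));
}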
path_utils_1.normalizeFilePath(fileStr); + const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) { continue; } @@ -2119,15 +2260,28 @@ exports.slug = slug; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__nccwpck_require__(2087)); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); const utils_1 = __nccwpck_require__(5278); /** * Commands @@ -2205,6 +2359,25 @@ function escapeProperty(s) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
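// Editor's note: illustrative sketch, not part of the diff. The regular
// expression used by getExceptionSource() above pulls the file and line
// number out of a V8-style stack frame; the frame below is a made-up example.
const re = /\((.*):(\d+):\d+\)$/;
const frame = '    at Object.test (src/main.test.ts:42:13)';
const match = frame.match(re);
if (match !== null) {
  const [, fileStr, lineStr] = match;
  console.log(`${fileStr}:${lineStr}`); // prints "src/main.test.ts:42"
}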
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2214,19 +2387,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2087)); -const path = __importStar(__nccwpck_require__(5622)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const uuid_1 = __nccwpck_require__(5840); +const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action */ @@ -2255,7 +2424,14 @@ function exportVariable(name, val) { process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. + if (name.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedVal.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; file_command_1.issueCommand('ENV', commandValue); } @@ -2288,7 +2464,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. @@ -2299,9 +2477,49 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + return inputs; +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * @@ -2310,6 +2528,7 @@ exports.getInput = getInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { + process.stdout.write(os.EOL); command_1.issueCommand('set-output', { name }, value); } exports.setOutput = setOutput; @@ -2356,19 +2575,30 @@ exports.debug = debug; /** * Adds an error issue * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** - * Adds an warning issue + * Adds a warning issue * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; /** * Writes info to log with console.log. 
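// Editor's note: illustrative usage sketch, not part of the diff. It exercises
// the input helpers bundled above (@actions/core); the input names are
// hypothetical and would be declared in an action.yml.
const core = require('@actions/core');
const reportPaths = core.getMultilineInput('report_paths'); // one entry per non-empty line
const failOnError = core.getBooleanInput('fail_on_error');  // accepts true/True/TRUE and false/False/FALSE
const rawList = core.getInput('list_suites', { trimWhitespace: false });
core.info(`fail_on_error=${failOnError}, suites=${rawList}`);
core.setOutput('report_count', reportPaths.length);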
* @param message info message @@ -2441,6 +2671,29 @@ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), @@ -2451,18 +2704,31 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issueCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(5747)); -const os = __importStar(__nccwpck_require__(2087)); +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); const utils_1 = __nccwpck_require__(5278); function issueCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -2481,6 +2747,445 @@ exports.issueCommand = issueCommand; /***/ }), +/***/ 8041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(6255); +const auth_1 = __nccwpck_require__(5526); +const core_1 = __nccwpck_require__(2186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.result.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
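// Editor's note: illustrative sketch, not part of the diff. The OidcClient
// above backs core.getIDToken(); a job with `permissions: id-token: write`
// can request a token like this. The audience value is hypothetical.
const core = require('@actions/core');
async function requestIdToken() {
  const idToken = await core.getIDToken('sts.example.com');
  return idToken; // already registered as a secret by getIDToken()
}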
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(1017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 1327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(2037); +const fs_1 = __nccwpck_require__(7147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + /***/ 5278: /***/ ((__unused_webpack_module, exports) => { @@ -2489,6 +3194,7 @@ exports.issueCommand = issueCommand; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string @@ -2503,6 +3209,26 @@ function toCommandValue(input) { return JSON.stringify(input); } exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; //# sourceMappingURL=utils.js.map /***/ }), @@ -2512,6 +3238,25 @@ exports.toCommandValue = toCommandValue; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
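// Editor's note: illustrative usage sketch, not part of the diff. It drives the
// job-summary builder bundled above via core.summary; it only works on runners
// that set GITHUB_STEP_SUMMARY, and all content values are hypothetical.
const core = require('@actions/core');
async function writeTestSummary() {
  await core.summary
    .addHeading('Test results', 2)
    .addTable([
      [{ data: 'Suite', header: true }, { data: 'Passed', header: true }],
      ['unit', '128']
    ])
    .addLink('Full report', 'https://example.com/report')
    .write();
}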
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2521,14 +3266,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(1576); const tr = __importStar(__nccwpck_require__(8159)); /** * Exec a command. @@ -2554,6 +3294,51 @@ function exec(commandLine, args, options) { }); } exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; //# sourceMappingURL=exec.js.map /***/ }), @@ -2563,6 +3348,25 @@ exports.exec = exec; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -2572,20 +3376,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const os = __importStar(__nccwpck_require__(2087)); -const events = __importStar(__nccwpck_require__(8614)); -const child = __importStar(__nccwpck_require__(3129)); -const path = __importStar(__nccwpck_require__(5622)); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const events = __importStar(__nccwpck_require__(2361)); +const child = __importStar(__nccwpck_require__(2081)); +const path = __importStar(__nccwpck_require__(1017)); const io = __importStar(__nccwpck_require__(7436)); const ioUtil = __importStar(__nccwpck_require__(1962)); +const timers_1 = __nccwpck_require__(9512); /* eslint-disable @typescript-eslint/unbound-method */ const IS_WINDOWS = process.platform === 'win32'; /* @@ -2655,11 +3454,12 @@ class ToolRunner extends events.EventEmitter { s = s.substring(n + os.EOL.length); n = s.indexOf(os.EOL); } - strBuffer = s; + return s; } catch (err) { // streaming lines to console is best effort. Don't fail a build. this._debug(`error processing line. 
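// Editor's note: illustrative sketch, not part of the diff. It uses the
// getExecOutput() helper bundled above (@actions/exec) to capture stdout and
// stderr while still streaming them to the live log.
const exec = require('@actions/exec');
async function changedFiles() {
  const { exitCode, stdout, stderr } = await exec.getExecOutput(
    'git', ['status', '--porcelain'], { ignoreReturnCode: true });
  if (exitCode !== 0) {
    throw new Error(`git status failed: ${stderr}`);
  }
  return stdout.split('\n').filter(line => line.length > 0);
}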
Failed with error ${err}`); + return ''; } } _getSpawnFileName() { @@ -2941,7 +3741,7 @@ class ToolRunner extends events.EventEmitter { // if the tool is only a file name, then resolve it from the PATH // otherwise verify it exists (add extension on Windows if necessary) this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug('arguments:'); for (const arg of this.args) { @@ -2955,9 +3755,12 @@ class ToolRunner extends events.EventEmitter { state.on('debug', (message) => { this._debug(message); }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } const fileName = this._getSpawnFileName(); const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - const stdbuffer = ''; + let stdbuffer = ''; if (cp.stdout) { cp.stdout.on('data', (data) => { if (this.options.listeners && this.options.listeners.stdout) { @@ -2966,14 +3769,14 @@ class ToolRunner extends events.EventEmitter { if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(data); } - this._processLineBuffer(data, stdbuffer, (line) => { + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { if (this.options.listeners && this.options.listeners.stdline) { this.options.listeners.stdline(line); } }); }); } - const errbuffer = ''; + let errbuffer = ''; if (cp.stderr) { cp.stderr.on('data', (data) => { state.processStderr = true; @@ -2988,7 +3791,7 @@ class ToolRunner extends events.EventEmitter { : optionsNonNull.outStream; s.write(data); } - this._processLineBuffer(data, errbuffer, (line) => { + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { if (this.options.listeners && this.options.listeners.errline) { this.options.listeners.errline(line); } @@ -3035,7 +3838,7 @@ class ToolRunner extends events.EventEmitter { } cp.stdin.end(this.options.input); } - }); + })); }); } } @@ -3121,7 +3924,7 @@ class ExecState extends events.EventEmitter { this._setResult(); } else if (this.processExited) { - this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); } } _debug(message) { @@ -3172,13 +3975,14 @@ class ExecState extends events.EventEmitter { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Context = void 0; -const fs_1 = __nccwpck_require__(5747); -const os_1 = __nccwpck_require__(2087); +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); class Context { /** * Hydrate the context from the environment */ constructor() { + var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { @@ -3198,6 +4002,9 @@ class Context { this.job = process.env.GITHUB_JOB; this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; @@ -3242,7 +4049,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -3285,13 +4092,13 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(9925)); +const httpClient = __importStar(__nccwpck_require__(6255)); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); @@ -3335,7 +4142,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -3376,16 +4183,134 @@ exports.getOctokitOptions = getOctokitOptions; /***/ }), -/***/ 9925: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5526: +/***/ (function(__unused_webpack_module, exports) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
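// Editor's note: illustrative sketch, not part of the diff. It reads the
// Context fields added above (apiUrl/serverUrl/graphqlUrl) together with the
// existing repo and run data; it only resolves inside an actual workflow run.
const core = require('@actions/core');
const github = require('@actions/github');
const { owner, repo } = github.context.repo;
core.info(`Run ${github.context.runId} for ${owner}/${repo}`);
core.info(`API: ${github.context.apiUrl}, server: ${github.context.serverUrl}, GraphQL: ${github.context.graphqlUrl}`);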
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const http = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); -const pm = __nccwpck_require__(6443); -let tunnel; +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 6255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(3685)); +const https = __importStar(__nccwpck_require__(5687)); +const pm = __importStar(__nccwpck_require__(9835)); +const tunnel = __importStar(__nccwpck_require__(4294)); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -3430,7 +4355,7 @@ var MediaTypes; * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; @@ -3463,20 +4388,22 @@ class HttpClientResponse { this.message = message; } readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); + const parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; @@ -3519,141 +4446,169 @@ class HttpClient { } } options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); } del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); } post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); } patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); } put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); } head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); } sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); } /** * Gets a typed object from an endpoint * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + 
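// Editor's note: illustrative sketch, not part of the diff. It pairs the
// bundled @actions/http-client with the bearer handler defined above; the
// import paths assume the v2 package layout and the URL is hypothetical.
const { HttpClient } = require('@actions/http-client');
const { BearerCredentialHandler } = require('@actions/http-client/lib/auth');
async function fetchReport(token) {
  const client = new HttpClient('my-action', [new BearerCredentialHandler(token)]);
  const res = await client.getJson('https://api.example.com/report');
  return res.result; // null when the endpoint returns 404
}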
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; } } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); } /** * Needs to be called if keepAlive is set to true in request options. @@ -3670,14 +4625,22 @@ class HttpClient { * @param data */ requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); + this.requestRawWithCallback(info, data, callbackForResult); + }); }); } /** @@ -3687,21 +4650,24 @@ class HttpClient { * @param onResult */ requestRawWithCallback(info, data, onResult) { - let socket; if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; - let handleResult = (err, res) => { + function handleResult(err, res) { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); }); + let socket; req.on('socket', sock => { socket = sock; }); @@ -3710,12 +4676,12 @@ class HttpClient { if (socket) { socket.end(); } - handleResult(new Error('Request timeout: ' + info.options.path), null); + handleResult(new Error(`Request timeout: ${info.options.path}`)); }); req.on('error', function (err) { // err has statusCode property // res should have headers - handleResult(err, null); + handleResult(err); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); @@ -3736,7 +4702,7 @@ class HttpClient { * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); + const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } _prepareRequest(method, requestUrl, headers) { @@ -3760,21 +4726,19 @@ class HttpClient { info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach(handler => { + for (const handler of this.handlers) { handler.prepareRequest(info.options); - }); + } } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -3783,8 +4747,8 @@ class HttpClient { } _getAgent(parsedUrl) { let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } @@ -3792,27 +4756,22 @@ class HttpClient { agent = this._agent; } // if agent is already assigned use that agent. 
- if (!!agent) { + if (agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; - if (!!this.requestOptions) { + if (this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __nccwpck_require__(4294); - } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { const agentOptions = { - maxSockets: maxSockets, + maxSockets, keepAlive: this._keepAlive, - proxy: { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, - host: proxyUrl.hostname, - port: proxyUrl.port - } + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -3827,7 +4786,7 @@ class HttpClient { } // if reusing agent across request and tunneling agent isn't assigned create a new agent if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } @@ -3846,109 +4805,117 @@ class HttpClient { return agent; } _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + 
else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; } else { - obj = JSON.parse(contents); + msg = `Failed request: (${statusCode})`; } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); } else { - msg = 'Failed request: (' + statusCode + ')'; + resolve(response); } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } + })); }); } } exports.HttpClient = HttpClient; - +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 6443: +/***/ 9835: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; + const usingSsl = reqUrl.protocol === 'https:'; if (checkBypass(reqUrl)) { - return proxyUrl; + return undefined; } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); } else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + return undefined; } - if (proxyVar) { - proxyUrl = new URL(proxyVar); - } - return proxyUrl; } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } @@ -3964,12 +4931,12 @@ function checkBypass(reqUrl) { reqPort = 443; } // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy + for (const upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { @@ -3980,7 +4947,7 @@ function checkBypass(reqUrl) { return false; } exports.checkBypass = checkBypass; - +//# sourceMappingURL=proxy.js.map 
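Editorial aside, not part of the diff: the proxy hunk above now returns `undefined` explicitly and resolves the proxy variable through an IIFE keyed on the request scheme. A minimal sketch of the same lookup, assuming the usual `https_proxy`/`http_proxy` and `no_proxy` environment variables; unlike the real `checkBypass` it only compares bare hostnames and skips the host:port form.

function resolveProxy(requestUrl) {
  const url = new URL(requestUrl);
  const noProxy = (process.env.no_proxy || process.env.NO_PROXY || '')
    .split(',')
    .map(host => host.trim().toUpperCase())
    .filter(host => host);
  // Bypass the proxy entirely when the request host appears in no_proxy.
  if (noProxy.includes(url.hostname.toUpperCase())) {
    return undefined;
  }
  // Pick the proxy variable that matches the request scheme.
  const proxyVar = url.protocol === 'https:'
    ? process.env.https_proxy || process.env.HTTPS_PROXY
    : process.env.http_proxy || process.env.HTTP_PROXY;
  return proxyVar ? new URL(proxyVar) : undefined;
}
// resolveProxy('https://api.github.com') -> URL of HTTPS_PROXY, or undefined
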
/***/ }), @@ -3989,6 +4956,25 @@ exports.checkBypass = checkBypass; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -4000,9 +4986,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); -const assert_1 = __nccwpck_require__(2357); -const fs = __nccwpck_require__(5747); -const path = __nccwpck_require__(5622); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rename = exports.readlink = exports.readdir = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const path = __importStar(__nccwpck_require__(1017)); _a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; exports.IS_WINDOWS = process.platform === 'win32'; function exists(fsPath) { @@ -4043,49 +5029,6 @@ function isRooted(p) { return p.startsWith('/'); } exports.isRooted = isRooted; -/** - * Recursively create a directory at `fsPath`. - * - * This implementation is optimistic, meaning it attempts to create the full - * path first, and backs up the path stack from there. - * - * @param fsPath The path to create - * @param maxDepth The maximum recursion depth - * @param depth The current recursion depth - */ -function mkdirP(fsPath, maxDepth = 1000, depth = 1) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - fsPath = path.resolve(fsPath); - if (depth >= maxDepth) - return exports.mkdir(fsPath); - try { - yield exports.mkdir(fsPath); - return; - } - catch (err) { - switch (err.code) { - case 'ENOENT': { - yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); - yield exports.mkdir(fsPath); - return; - } - default: { - let stats; - try { - stats = yield exports.stat(fsPath); - } - catch (err2) { - throw err; - } - if (!stats.isDirectory()) - throw err; - } - } - } - }); -} -exports.mkdirP = mkdirP; /** * Best effort attempt to determine whether a file exists and is executable. 
* @param filePath file path to check @@ -4182,6 +5125,12 @@ function isUnixExecutable(stats) { ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || ((stats.mode & 64) > 0 && stats.uid === process.getuid())); } +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; //# sourceMappingURL=io-util.js.map /***/ }), @@ -4191,6 +5140,25 @@ function isUnixExecutable(stats) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -4201,11 +5169,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const childProcess = __nccwpck_require__(3129); -const path = __nccwpck_require__(5622); -const util_1 = __nccwpck_require__(1669); -const ioUtil = __nccwpck_require__(1962); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(9491); +const childProcess = __importStar(__nccwpck_require__(2081)); +const path = __importStar(__nccwpck_require__(1017)); +const util_1 = __nccwpck_require__(3837); +const ioUtil = __importStar(__nccwpck_require__(1962)); const exec = util_1.promisify(childProcess.exec); +const execFile = util_1.promisify(childProcess.execFile); /** * Copies a file or folder. * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js @@ -4216,14 +5187,14 @@ const exec = util_1.promisify(childProcess.exec); */ function cp(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { - const { force, recursive } = readCopyOptions(options); + const { force, recursive, copySourceDirectory } = readCopyOptions(options); const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; // Dest is an existing file, but not forcing if (destStat && destStat.isFile() && !force) { return; } // If dest is an existing directory, should copy inside. - const newDest = destStat && destStat.isDirectory() + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path.join(dest, path.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { @@ -4288,12 +5259,22 @@ function rmRF(inputPath) { if (ioUtil.IS_WINDOWS) { // Node doesn't provide a delete operation, only an unlink function. 
This means that if the file is being used by another // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } try { + const cmdPath = ioUtil.getCmdPath(); if (yield ioUtil.isDirectory(inputPath, true)) { - yield exec(`rd /s /q "${inputPath}"`); + yield exec(`${cmdPath} /s /c "rd /s /q "%inputPath%""`, { + env: { inputPath } + }); } else { - yield exec(`del /f /a "${inputPath}"`); + yield exec(`${cmdPath} /s /c "del /f /a "%inputPath%""`, { + env: { inputPath } + }); } } catch (err) { @@ -4326,7 +5307,7 @@ function rmRF(inputPath) { return; } if (isDir) { - yield exec(`rm -rf "${inputPath}"`); + yield execFile(`rm`, [`-rf`, `${inputPath}`]); } else { yield ioUtil.unlink(inputPath); @@ -4344,7 +5325,8 @@ exports.rmRF = rmRF; */ function mkdirP(fsPath) { return __awaiter(this, void 0, void 0, function* () { - yield ioUtil.mkdirP(fsPath); + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); }); } exports.mkdirP = mkdirP; @@ -4372,62 +5354,80 @@ function which(tool, check) { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } + return result; } - try { - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { - for (const extension of process.env.PATHEXT.split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return filePath; - } - return ''; - } - // if any path separators, return empty - if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { - return ''; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } - } - // return the first match - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); - if (filePath) { - return filePath; - } - } - return ''; - } - catch (err) { - throw new Error(`which failed with message ${err.message}`); + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; } + return ''; }); } exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. 
+ * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; function readCopyOptions(options) { const force = options.force == null ? true : options.force; const recursive = Boolean(options.recursive); - return { force, recursive }; + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; } function cpDirRecursive(sourceDir, destDir, currentDepth, force) { return __awaiter(this, void 0, void 0, function* () { @@ -4490,7 +5490,7 @@ function copyFile(srcFile, destFile, force) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); +const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, @@ -4518,8 +5518,11 @@ exports.createFileSystemAdapter = createFileSystemAdapter; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -const MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. 
The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); const SUPPORTED_MAJOR_VERSION = 10; const SUPPORTED_MINOR_VERSION = 10; const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; @@ -4545,7 +5548,8 @@ const settings_1 = __nccwpck_require__(8662); exports.Settings = settings_1.default; function scandir(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } @@ -4579,15 +5583,17 @@ const utils = __nccwpck_require__(6297); const common = __nccwpck_require__(3847); function read(directory, settings, callback) { if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings, callback); + readdirWithFileTypes(directory, settings, callback); + return; } - return readdir(directory, settings, callback); + readdir(directory, settings, callback); } exports.read = read; function readdirWithFileTypes(directory, settings, callback) { settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { if (readdirError !== null) { - return callFailureCallback(callback, readdirError); + callFailureCallback(callback, readdirError); + return; } const entries = dirents.map((dirent) => ({ dirent, @@ -4595,12 +5601,14 @@ function readdirWithFileTypes(directory, settings, callback) { path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) })); if (!settings.followSymbolicLinks) { - return callSuccessCallback(callback, entries); + callSuccessCallback(callback, entries); + return; } const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); rpl(tasks, (rplError, rplEntries) => { if (rplError !== null) { - return callFailureCallback(callback, rplError); + callFailureCallback(callback, rplError); + return; } callSuccessCallback(callback, rplEntries); }); @@ -4610,46 +5618,54 @@ exports.readdirWithFileTypes = readdirWithFileTypes; function makeRplTaskEntry(entry, settings) { return (done) => { if (!entry.dirent.isSymbolicLink()) { - return done(null, entry); + done(null, entry); + return; } settings.fs.stat(entry.path, (statError, stats) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { - return done(statError); + done(statError); + return; } - return done(null, entry); + done(null, entry); + return; } entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - return done(null, entry); + done(null, entry); }); }; } function readdir(directory, settings, callback) { settings.fs.readdir(directory, (readdirError, names) => { if (readdirError !== null) { - return callFailureCallback(callback, readdirError); + callFailureCallback(callback, readdirError); + return; } - const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator)); - const tasks = filepaths.map((filepath) => { - return (done) => fsStat.stat(filepath, settings.fsStatSettings, done); + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if 
(error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; }); - rpl(tasks, (rplError, results) => { + rpl(tasks, (rplError, entries) => { if (rplError !== null) { - return callFailureCallback(callback, rplError); + callFailureCallback(callback, rplError); + return; } - const entries = []; - names.forEach((name, index) => { - const stats = results[index]; - const entry = { - name, - path: filepaths[index], - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - entries.push(entry); - }); callSuccessCallback(callback, entries); }); }); @@ -4754,7 +5770,7 @@ exports.readdir = readdir; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const fsStat = __nccwpck_require__(109); const fs = __nccwpck_require__(3803); class Settings { @@ -4775,7 +5791,7 @@ class Settings { return option !== null && option !== void 0 ? option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), @@ -4827,7 +5843,7 @@ exports.fs = fs; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); +const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, @@ -4858,7 +5874,8 @@ const settings_1 = __nccwpck_require__(2410); exports.Settings = settings_1.default; function stat(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return async.read(path, getSettings(), optionsOrSettingsOrCallback); + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } @@ -4888,17 +5905,21 @@ exports.read = void 0; function read(path, settings, callback) { settings.fs.lstat(path, (lstatError, lstat) => { if (lstatError !== null) { - return callFailureCallback(callback, lstatError); + callFailureCallback(callback, lstatError); + return; } if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return callSuccessCallback(callback, lstat); + callSuccessCallback(callback, lstat); + return; } settings.fs.stat(path, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { - return callFailureCallback(callback, statError); + callFailureCallback(callback, statError); + return; } - return callSuccessCallback(callback, lstat); + callSuccessCallback(callback, lstat); + return; } if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; @@ -4968,7 +5989,7 @@ class Settings { return option !== null && option !== void 0 ? 
option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), @@ -4987,7 +6008,8 @@ const settings_1 = __nccwpck_require__(141); exports.Settings = settings_1.default; function walk(directory, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { - return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; } new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); } @@ -5026,22 +6048,22 @@ class AsyncProvider { this._root = _root; this._settings = _settings; this._reader = new async_1.default(this._root, this._settings); - this._storage = new Set(); + this._storage = []; } read(callback) { this._reader.onError((error) => { callFailureCallback(callback, error); }); this._reader.onEntry((entry) => { - this._storage.add(entry); + this._storage.push(entry); }); this._reader.onEnd(() => { - callSuccessCallback(callback, [...this._storage]); + callSuccessCallback(callback, this._storage); }); this._reader.read(); } } -exports.default = AsyncProvider; +exports["default"] = AsyncProvider; function callFailureCallback(callback, error) { callback(error); } @@ -5058,7 +6080,7 @@ function callSuccessCallback(callback, entries) { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); +const stream_1 = __nccwpck_require__(2781); const async_1 = __nccwpck_require__(5732); class StreamProvider { constructor(_root, _settings) { @@ -5089,7 +6111,7 @@ class StreamProvider { return this._stream; } } -exports.default = StreamProvider; +exports["default"] = StreamProvider; /***/ }), @@ -5111,7 +6133,7 @@ class SyncProvider { return this._reader.read(); } } -exports.default = SyncProvider; +exports["default"] = SyncProvider; /***/ }), @@ -5122,7 +6144,7 @@ exports.default = SyncProvider; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const events_1 = __nccwpck_require__(8614); +const events_1 = __nccwpck_require__(2361); const fsScandir = __nccwpck_require__(5667); const fastq = __nccwpck_require__(7340); const common = __nccwpck_require__(7988); @@ -5180,7 +6202,8 @@ class AsyncReader extends reader_1.default { _worker(item, done) { this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { if (error !== null) { - return done(error, undefined); + done(error, undefined); + return; } for (const entry of entries) { this._handleEntry(entry, item.base); @@ -5208,14 +6231,14 @@ class AsyncReader extends reader_1.default { this._emitEntry(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); + this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); } } _emitEntry(entry) { this._emitter.emit('entry', entry); } } -exports.default = AsyncReader; +exports["default"] = AsyncReader; /***/ }), @@ -5273,7 +6296,7 @@ class Reader { this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); } } -exports.default = Reader; +exports["default"] = Reader; /***/ }), @@ -5291,13 +6314,13 @@ class SyncReader extends reader_1.default { constructor() { super(...arguments); this._scandir = fsScandir.scandirSync; - this._storage = new Set(); + this._storage = []; this._queue = new Set(); } read() { this._pushToQueue(this._root, this._settings.basePath); this._handleQueue(); - return [...this._storage]; + return this._storage; } _pushToQueue(directory, base) { this._queue.add({ directory, base }); @@ -5333,14 +6356,14 @@ class SyncReader extends reader_1.default { this._pushToStorage(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, entry.path); + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); } } _pushToStorage(entry) { - this._storage.add(entry); + this._storage.push(entry); } } -exports.default = SyncReader; +exports["default"] = SyncReader; /***/ }), @@ -5351,13 +6374,13 @@ exports.default = SyncReader; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const fsScandir = __nccwpck_require__(5667); class Settings { constructor(_options = {}) { this._options = _options; this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Infinity); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); @@ -5374,7 +6397,7 @@ class Settings { return option !== null && option !== void 0 ? option : value; } } -exports.default = Settings; +exports["default"] = Settings; /***/ }), @@ -5387,8 +6410,14 @@ exports.default = Settings; Object.defineProperty(exports, "__esModule", ({ value: true })); +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; return { type: "token", token: token, @@ -5486,8 +6515,9 @@ function _objectWithoutProperties(source, excluded) { return target; } -const VERSION = "3.2.1"; +const VERSION = "3.6.0"; +const _excluded = ["authStrategy"]; class Octokit { constructor(options = {}) { const hook = new beforeAfterHook.Collection(); @@ -5495,6 +6525,7 @@ class Octokit { baseUrl: request.request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type hook: hook.bind(null, "request") }), mediaType: { @@ -5548,7 +6579,7 @@ class Octokit { const { authStrategy } = options, - otherOptions = _objectWithoutProperties(options, ["authStrategy"]); + otherOptions = _objectWithoutProperties(options, _excluded); const auth = authStrategy(Object.assign({ request: this.request, @@ -5626,7 +6657,7 @@ exports.Octokit = Octokit; Object.defineProperty(exports, "__esModule", ({ value: true })); -var isPlainObject = __nccwpck_require__(558); +var isPlainObject = __nccwpck_require__(3287); var universalUserAgent = __nccwpck_require__(5030); function lowercaseKeys(object) { @@ -5990,7 +7021,7 @@ function withDefaults(oldDefaults, newDefaults) { }); } -const VERSION = "6.0.9"; +const VERSION = "6.0.12"; const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. // So we use RequestParameters and add method as additional required property. @@ -6014,52 +7045,6 @@ exports.endpoint = endpoint; //# sourceMappingURL=index.js.map -/***/ }), - -/***/ 558: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. 
- */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - /***/ }), /***/ 8467: @@ -6073,18 +7058,22 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); var request = __nccwpck_require__(6234); var universalUserAgent = __nccwpck_require__(5030); -const VERSION = "4.5.7"; +const VERSION = "4.8.0"; -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - Object.assign(this, { - headers: response.headers - }); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); +} + +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. + + this.errors = response.errors; + this.data = response.data; // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ @@ -6096,10 +7085,18 @@ class GraphqlError extends Error { } const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; function graphql(request, query, options) { - if (typeof query === "string" && options && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); + } } const parsedOptions = typeof query === "string" ? 
Object.assign({ @@ -6134,10 +7131,7 @@ function graphql(request, query, options) { headers[key] = response.headers[key]; } - throw new GraphqlError(requestOptions, { - headers, - data: response.data - }); + throw new GraphqlResponseError(requestOptions, headers, response.data); } return response.data.data; @@ -6171,6 +7165,7 @@ function withCustomRequest(customRequest) { }); } +exports.GraphqlResponseError = GraphqlResponseError; exports.graphql = graphql$1; exports.withCustomRequest = withCustomRequest; //# sourceMappingURL=index.js.map @@ -6186,7 +7181,48 @@ exports.withCustomRequest = withCustomRequest; Object.defineProperty(exports, "__esModule", ({ value: true })); -const VERSION = "2.6.0"; +const VERSION = "2.21.3"; + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + enumerableOnly && (symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + })), keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = null != arguments[i] ? arguments[i] : {}; + i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { + _defineProperty(target, key, source[key]); + }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} /** * Some “list” response that can be paginated have a different response structure @@ -6205,6 +7241,13 @@ const VERSION = "2.6.0"; * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref */ function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return _objectSpread2(_objectSpread2({}, response), {}, { + data: [] + }); + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way // to retrieve the same information. 
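Editorial aside, not part of the diff: the paginate-rest hunk above adds a guard for endpoints that answer 204 No Content (for example, listing commits of an empty repository), where `response.data` is missing entirely. A small sketch of that normalization, using a hypothetical `response` object; the real helper additionally flattens `total_count`-style list envelopes.

function normalizeEmptyPage(response) {
  // A 204 reply carries no body, so substitute an empty page to keep
  // downstream `response.data.length` / iteration code working.
  if (!response.data) {
    return Object.assign({}, response, { data: [] });
  }
  return response;
}
// normalizeEmptyPage({ status: 204, headers: {} }).data -> []
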
@@ -6243,19 +7286,32 @@ function iterator(octokit, route, parameters) { if (!url) return { done: true }; - const response = await requestMethod({ - method, - url, - headers - }); - const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: normalizedResponse - }; + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } } }) @@ -6297,6 +7353,16 @@ const composePaginateRest = Object.assign(paginate, { iterator }); +const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET 
/orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", 
"GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + /** * @param octokit Octokit instance * @param options Options passed to Octokit constructor @@ -6312,7 +7378,9 @@ function 
paginateRest(octokit) { paginateRest.VERSION = VERSION; exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; //# sourceMappingURL=index.js.map @@ -6326,10 +7394,67 @@ exports.paginateRest = paginateRest; Object.defineProperty(exports, "__esModule", ({ value: true })); +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + const Endpoints = { actions: { + addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"], + addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], @@ -6337,44 +7462,96 @@ const Endpoints = { createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], + deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], deleteWorkflowRun: ["DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}"], deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], + disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"], downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], + enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], + getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], + getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], + getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"], + getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"], + getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], + getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { + renamed: ["actions", "getGithubActionsPermissionsRepository"] + }], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: ["GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], + listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"], + listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"], + removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"], removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], + setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], + setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"], + setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"], + setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"], + setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT 
/repos/{owner}/{repo}/actions/permissions/workflow"], + setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], + setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], + setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"], + setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], @@ -6410,13 +7587,11 @@ const Endpoints = { unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { + renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }], + addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], @@ -6430,6 +7605,8 @@ const Endpoints = { getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], @@ -6440,108 +7617,143 @@ const Endpoints = { listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { + renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] + }], + removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], resetToken: ["PATCH /applications/{client_id}/token"], revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + unsuspendInstallation: ["DELETE 
/app/installations/{installation_id}/suspended"], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"], + getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] }, checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] - } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] - } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] - } - }], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] - } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }] + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { + deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { renamedParameters: { alert_id: "alert_number" } }], + getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: ["GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { + renamed: ["codeScanning", "listAlertInstances"] + }], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], + codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"], + createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], + createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], + exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], + getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], + getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], + listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: ["GET /orgs/{org}/codespaces", {}, { + renamedParameters: { + org_id: "org" } }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] + listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"], + repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"], + setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], + 
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + dependabot: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] + }, + dependencyGraph: { + createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], + diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] }, emojis: { get: ["GET /emojis"] }, + enterpriseAdmin: { + addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], + getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], + getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"], + listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], + removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"], + setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], + setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"], + setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], + setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], @@ -6584,35 +7796,23 @@ const Endpoints 
= { getTemplate: ["GET /gitignore/templates/{name}"] }, interactions: { - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { + renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], + removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { + renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { + renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }] }, issues: { @@ -6637,11 +7837,7 @@ const Endpoints = { listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], @@ -6674,82 +7870,41 @@ const Endpoints = { }] }, meta: { - get: ["GET /meta"] + get: ["GET /meta"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] }, migrations: { cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"], + downloadArchiveForOrg: ["GET 
/orgs/{org}/migrations/{migration_id}/archive"], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"], getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], getImportStatus: ["GET /repos/{owner}/{repo}/import"], getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, { + renamed: ["migrations", "listReposForAuthenticatedUser"] }], mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"], updateImport: ["PATCH /repos/{owner}/{repo}/import"] }, orgs: { blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], @@ -6761,9 +7916,13 @@ const Endpoints = { getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], 
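// Illustrative sketch -- not the plugin's own implementation -- of how entries in the
// Endpoints map above, shaped as [route, defaults?, decorations?], can be expanded into
// callable rest methods. The real @octokit/plugin-rest-endpoint-methods additionally handles
// the `renamedParameters`, `mapToData` and `deprecated` decorations and logs deprecation
// warnings for `renamed` aliases; this only shows the basic shape. It assumes any client
// exposing `request(route, options)`, such as an Octokit instance.
function buildRestMethods(octokit, endpointMap) {
  const rest = {};
  for (const [scope, methods] of Object.entries(endpointMap)) {
    rest[scope] = {};
    for (const [methodName, entry] of Object.entries(methods)) {
      const [route, defaults = {}, decorations = {}] = entry;
      if (decorations.renamed) {
        // e.g. actions.getRepoPermissions forwards to
        // actions.getGithubActionsPermissionsRepository at call time.
        const [newScope, newName] = decorations.renamed;
        rest[scope][methodName] = (params) => rest[newScope][newName](params);
        continue;
      }
      // Merge any per-route defaults (such as a custom baseUrl) with the caller's parameters.
      rest[scope][methodName] = (params) =>
        octokit.request(route, Object.assign({}, defaults, params));
    }
  }
  return rest;
}
// Usage with a route that appears in the map above (hypothetical values):
//   const rest = buildRestMethods(octokit, Endpoints);
//   await rest.actions.getWorkflowRunAttempt({ owner, repo, run_id, attempt_number });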
@@ -6772,8 +7931,10 @@ const Endpoints = { listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], @@ -6783,134 +7944,67 @@ const Endpoints = { unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], + deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], + deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"], + deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] + }], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] + }], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], + getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], + getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], + getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], + getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForOrg: ["POST 
/orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] }, projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] - } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] - } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForUser: ["GET /users/{username}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }] + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + 
createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] }, pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], @@ -6937,11 +8031,7 @@ const Endpoints = { requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] - } - }], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"], updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] }, @@ -6949,106 +8039,33 @@ const Endpoints = { get: ["GET /rate_limit"] }, reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForPullRequestComment: ["DELETE 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }, { - deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }] + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], + createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], + deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], + 
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], + listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] }, repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, { + renamed: ["repos", "acceptInvitationForAuthenticatedUser"] + }], + acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"], addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" }], @@ -7063,18 +8080,13 @@ const Endpoints = { mapToData: "users" }], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], @@ -7083,74 +8095,55 @@ const Endpoints = { createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] - } - }], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { + renamed: ["repos", "declineInvitationForAuthenticatedUser"] + }], + declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"], delete: ["DELETE /repos/{owner}/{repo}"], deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], deleteAdminBranchProtection: ["DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"], + downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { + renamed: ["repos", "downloadZipballArchive"] }], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"], + generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"], get: ["GET /repos/{owner}/{repo}"], getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], + getAllTopics: ["GET 
/repos/{owner}/{repo}/topics"], getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], @@ -7160,29 +8153,24 @@ const Endpoints = { getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", { - mediaType: { - previews: ["black-panther"] - } - }], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], @@ -7193,12 +8181,11 @@ const Endpoints = { getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], + getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] - } - }], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], @@ -7217,18 +8204,18 @@ 
const Endpoints = { listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] - } - }], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"], removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" }], @@ -7243,11 +8230,8 @@ const Endpoints = { removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { @@ -7272,42 +8256,38 @@ const Endpoints = { updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusCheckProtection"] + }], + updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { baseUrl: "https://uploads.github.com" }] }, search: { code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] - } - }], + commits: ["GET /search/commits"], issuesAndPullRequests: ["GET /search/issues"], labels: ["GET /search/labels"], repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { - mediaType: { - previews: ["mercy"] - } - }], + topics: ["GET /search/topics"], users: ["GET /search/users"] }, + secretScanning: { + getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], + listAlertsForEnterprise: ["GET 
/enterprises/{enterprise}/secret-scanning/alerts"], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"], + updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + }, teams: { addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"], addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"], checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], create: ["POST /orgs/{org}/teams"], createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], @@ -7326,11 +8306,7 @@ const Endpoints = { listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] - } - }], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], @@ -7340,42 +8316,87 @@ const Endpoints = { updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { - addEmailForAuthenticated: ["POST /user/emails"], + addEmailForAuthenticated: ["POST /user/emails", {}, { + renamed: ["users", "addEmailForAuthenticatedUser"] + }], + addEmailForAuthenticatedUser: ["POST /user/emails"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, { + renamed: ["users", "createGpgKeyForAuthenticatedUser"] + }], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, { + renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] + }], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails", {}, { + renamed: ["users", "deleteEmailForAuthenticatedUser"] + }], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] + 
}], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, { + renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] + }], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], follow: ["PUT /user/following/{username}"], getAuthenticated: ["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "getGpgKeyForAuthenticatedUser"] + }], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, { + renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] + }], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], + listBlockedByAuthenticated: ["GET /user/blocks", {}, { + renamed: ["users", "listBlockedByAuthenticatedUser"] + }], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails", {}, { + renamed: ["users", "listEmailsForAuthenticatedUser"] + }], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following", {}, { + renamed: ["users", "listFollowedByAuthenticatedUser"] + }], + listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, { + renamed: ["users", "listGpgKeysForAuthenticatedUser"] + }], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, { + renamed: ["users", "listPublicEmailsForAuthenticatedUser"] + }], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, { + renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] + }], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, { + renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] + }], + setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; -const VERSION = "4.2.1"; +const VERSION = "5.16.2"; function endpointsToMethods(octokit, endpointsMap) { const newMethods = {}; @@ -7458,22 +8479,22 @@ function decorate(octokit, scope, methodName, defaults, decorations) { return Object.assign(withDecorations, requestWithDefaults); } -/** - * This plugin is a 1:1 copy of internal 
@octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. - * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ - function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); + const api = endpointsToMethods(octokit, Endpoints); + return { + rest: api + }; } restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return _objectSpread2(_objectSpread2({}, api), {}, { + rest: api + }); +} +legacyRestEndpointMethods.VERSION = VERSION; +exports.legacyRestEndpointMethods = legacyRestEndpointMethods; exports.restEndpointMethods = restEndpointMethods; //# sourceMappingURL=index.js.map @@ -7493,7 +8514,8 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau var deprecation = __nccwpck_require__(8932); var once = _interopDefault(__nccwpck_require__(1223)); -const logOnce = once(deprecation => console.warn(deprecation)); +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); /** * Error with extra properties to help with debugging */ @@ -7510,14 +8532,17 @@ class RequestError extends Error { this.name = "HttpError"; this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options const requestCopy = Object.assign({}, options.request); @@ -7532,7 +8557,22 @@ class RequestError extends Error { .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); } } @@ -7555,17 +8595,19 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau var endpoint = __nccwpck_require__(9440); var universalUserAgent = __nccwpck_require__(5030); -var isPlainObject = __nccwpck_require__(9062); +var isPlainObject = __nccwpck_require__(3287); var nodeFetch = _interopDefault(__nccwpck_require__(467)); var requestError = __nccwpck_require__(537); -const VERSION = "5.4.10"; +const VERSION = "5.6.3"; function 
getBufferResponse(response) { return response.arrayBuffer(); } function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } @@ -7579,7 +8621,9 @@ function fetchWrapper(requestOptions) { body: requestOptions.body, headers: requestOptions.headers, redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { + }, // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request)).then(async response => { url = response.url; status = response.status; @@ -7587,6 +8631,12 @@ function fetchWrapper(requestOptions) { headers[keyAndValue[0]] = keyAndValue[1]; } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + } + if (status === 204 || status === 205) { return; } // GitHub API returns 200 for HEAD requests @@ -7598,49 +8648,43 @@ function fetchWrapper(requestOptions) { } throw new requestError.RequestError(response.statusText, status, { - headers, + response: { + url, + status, + headers, + data: undefined + }, request: requestOptions }); } if (status === 304) { throw new requestError.RequestError("Not modified", status, { - headers, + response: { + url, + status, + headers, + data: await getResponseData(response) + }, request: requestOptions }); } if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { + const data = await getResponseData(response); + const error = new requestError.RequestError(toErrorMessage(data), status, { + response: { + url, + status, headers, - request: requestOptions - }); - - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format - - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - - throw error; + data + }, + request: requestOptions }); + throw error; } - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); + return getResponseData(response); }).then(data => { return { status, @@ -7649,17 +8693,42 @@ function fetchWrapper(requestOptions) { data }; }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } - + if (error instanceof requestError.RequestError) throw error; throw new requestError.RequestError(error.message, 500, { - headers, request: requestOptions }); }); } +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); +} + 
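(Editorial aside, not part of the generated diff: a minimal caller-side sketch of what the reworked request path above means for consumers. `fetchWrapper` now builds error messages via `toErrorMessage` and attaches the full `response` ({ url, status, headers, data }) to `RequestError`, while `error.code` and `error.headers` survive only as deprecation getters. The `octokit` instance, the owner/repo/branch values, and the `getBranchSafe` helper are illustrative assumptions, not code from this bundle.)

```js
// Hypothetical usage sketch (assumed names); not part of the bundled code above.
const { RequestError } = require("@octokit/request-error");

async function getBranchSafe(octokit, owner, repo, branch) {
  try {
    // Endpoint shape taken from the patched Endpoints map in this diff:
    // getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"]
    const { data } = await octokit.rest.repos.getBranch({ owner, repo, branch });
    return data;
  } catch (error) {
    if (error instanceof RequestError) {
      // `error.status` is the supported field; reading `error.code` now only
      // triggers a one-time deprecation warning (logOnceCode).
      console.warn(`GitHub API request failed with status ${error.status}`);
      // New in this version: the parsed body and headers live on `error.response`
      // ({ url, status, headers, data }); `error.headers` is deprecated in favour
      // of `error.response.headers`.
      if (error.response) {
        console.warn(JSON.stringify(error.response.data));
      }
      return undefined;
    }
    throw error; // non-HTTP failures are re-thrown unchanged
  }
}
```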
+function toErrorMessage(data) { + if (typeof data === "string") return data; // istanbul ignore else - just in case + + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + + return data.message; + } // istanbul ignore next - just in case + + + return `Unknown error: ${JSON.stringify(data)}`; +} + function withDefaults(oldEndpoint, newDefaults) { const endpoint = oldEndpoint.defaults(newDefaults); @@ -7697,886 +8766,315 @@ exports.request = request; //# sourceMappingURL=index.js.map -/***/ }), - -/***/ 9062: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), - -/***/ 7678: -/***/ ((module, exports) => { - -"use strict"; - -/// -/// -/// -Object.defineProperty(exports, "__esModule", ({ value: true })); -const typedArrayTypeNames = [ - 'Int8Array', - 'Uint8Array', - 'Uint8ClampedArray', - 'Int16Array', - 'Uint16Array', - 'Int32Array', - 'Uint32Array', - 'Float32Array', - 'Float64Array', - 'BigInt64Array', - 'BigUint64Array' -]; -function isTypedArrayName(name) { - return typedArrayTypeNames.includes(name); -} -const objectTypeNames = [ - 'Function', - 'Generator', - 'AsyncGenerator', - 'GeneratorFunction', - 'AsyncGeneratorFunction', - 'AsyncFunction', - 'Observable', - 'Array', - 'Buffer', - 'Object', - 'RegExp', - 'Date', - 'Error', - 'Map', - 'Set', - 'WeakMap', - 'WeakSet', - 'ArrayBuffer', - 'SharedArrayBuffer', - 'DataView', - 'Promise', - 'URL', - 'HTMLElement', - ...typedArrayTypeNames -]; -function isObjectTypeName(name) { - return objectTypeNames.includes(name); -} -const primitiveTypeNames = [ - 'null', - 'undefined', - 'string', - 'number', - 'bigint', - 'boolean', - 'symbol' -]; -function isPrimitiveTypeName(name) { - return primitiveTypeNames.includes(name); -} -// eslint-disable-next-line @typescript-eslint/ban-types -function isOfType(type) { - return (value) => typeof value === type; -} -const { toString } = Object.prototype; -const getObjectType = (value) => { - const objectTypeName = toString.call(value).slice(8, -1); - if (/HTML\w+Element/.test(objectTypeName) && is.domElement(value)) { - return 'HTMLElement'; - } - if (isObjectTypeName(objectTypeName)) { - return objectTypeName; - } - return undefined; -}; -const isObjectOfType = (type) => (value) => getObjectType(value) === type; -function is(value) { - if (value === null) { - return 'null'; - } - switch (typeof value) { - case 'undefined': - return 'undefined'; - case 'string': - return 'string'; - case 'number': - return 'number'; - case 'boolean': - return 'boolean'; - case 'function': - return 'Function'; - case 'bigint': - return 'bigint'; - case 'symbol': - return 'symbol'; - default: - } - if 
(is.observable(value)) { - return 'Observable'; - } - if (is.array(value)) { - return 'Array'; - } - if (is.buffer(value)) { - return 'Buffer'; - } - const tagType = getObjectType(value); - if (tagType) { - return tagType; - } - if (value instanceof String || value instanceof Boolean || value instanceof Number) { - throw new TypeError('Please don\'t use object wrappers for primitive types'); - } - return 'Object'; -} -is.undefined = isOfType('undefined'); -is.string = isOfType('string'); -const isNumberType = isOfType('number'); -is.number = (value) => isNumberType(value) && !is.nan(value); -is.bigint = isOfType('bigint'); -// eslint-disable-next-line @typescript-eslint/ban-types -is.function_ = isOfType('function'); -is.null_ = (value) => value === null; -is.class_ = (value) => is.function_(value) && value.toString().startsWith('class '); -is.boolean = (value) => value === true || value === false; -is.symbol = isOfType('symbol'); -is.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value)); -is.array = (value, assertion) => { - if (!Array.isArray(value)) { - return false; - } - if (!is.function_(assertion)) { - return true; - } - return value.every(assertion); -}; -is.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; }; -is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); -is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value)); -is.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); }; -is.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); }; -is.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw); -is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw); -is.nativePromise = (value) => isObjectOfType('Promise')(value); -const hasPromiseAPI = (value) => { - var _a, _b; - return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.then) && - is.function_((_b = value) === null || _b === void 0 ? 
void 0 : _b.catch); -}; -is.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value); -is.generatorFunction = isObjectOfType('GeneratorFunction'); -is.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction'; -is.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction'; -// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types -is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype'); -is.regExp = isObjectOfType('RegExp'); -is.date = isObjectOfType('Date'); -is.error = isObjectOfType('Error'); -is.map = (value) => isObjectOfType('Map')(value); -is.set = (value) => isObjectOfType('Set')(value); -is.weakMap = (value) => isObjectOfType('WeakMap')(value); -is.weakSet = (value) => isObjectOfType('WeakSet')(value); -is.int8Array = isObjectOfType('Int8Array'); -is.uint8Array = isObjectOfType('Uint8Array'); -is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray'); -is.int16Array = isObjectOfType('Int16Array'); -is.uint16Array = isObjectOfType('Uint16Array'); -is.int32Array = isObjectOfType('Int32Array'); -is.uint32Array = isObjectOfType('Uint32Array'); -is.float32Array = isObjectOfType('Float32Array'); -is.float64Array = isObjectOfType('Float64Array'); -is.bigInt64Array = isObjectOfType('BigInt64Array'); -is.bigUint64Array = isObjectOfType('BigUint64Array'); -is.arrayBuffer = isObjectOfType('ArrayBuffer'); -is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer'); -is.dataView = isObjectOfType('DataView'); -is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype; -is.urlInstance = (value) => isObjectOfType('URL')(value); -is.urlString = (value) => { - if (!is.string(value)) { - return false; - } - try { - new URL(value); // eslint-disable-line no-new - return true; - } - catch (_a) { - return false; - } -}; -// TODO: Use the `not` operator with a type guard here when it's available. 
-// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);` -is.truthy = (value) => Boolean(value); -// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);` -is.falsy = (value) => !value; -is.nan = (value) => Number.isNaN(value); -is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value); -is.integer = (value) => Number.isInteger(value); -is.safeInteger = (value) => Number.isSafeInteger(value); -is.plainObject = (value) => { - // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js - if (toString.call(value) !== '[object Object]') { - return false; - } - const prototype = Object.getPrototypeOf(value); - return prototype === null || prototype === Object.getPrototypeOf({}); -}; -is.typedArray = (value) => isTypedArrayName(getObjectType(value)); -const isValidLength = (value) => is.safeInteger(value) && value >= 0; -is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length); -is.inRange = (value, range) => { - if (is.number(range)) { - return value >= Math.min(0, range) && value <= Math.max(range, 0); - } - if (is.array(range) && range.length === 2) { - return value >= Math.min(...range) && value <= Math.max(...range); - } - throw new TypeError(`Invalid range: ${JSON.stringify(range)}`); -}; -const NODE_TYPE_ELEMENT = 1; -const DOM_PROPERTIES_TO_CHECK = [ - 'innerHTML', - 'ownerDocument', - 'style', - 'attributes', - 'nodeValue' -]; -is.domElement = (value) => { - return is.object(value) && - value.nodeType === NODE_TYPE_ELEMENT && - is.string(value.nodeName) && - !is.plainObject(value) && - DOM_PROPERTIES_TO_CHECK.every(property => property in value); -}; -is.observable = (value) => { - var _a, _b, _c, _d; - if (!value) { - return false; - } - // eslint-disable-next-line no-use-extend-native/no-use-extend-native - if (value === ((_b = (_a = value)[Symbol.observable]) === null || _b === void 0 ? void 0 : _b.call(_a))) { - return true; - } - if (value === ((_d = (_c = value)['@@observable']) === null || _d === void 0 ? void 0 : _d.call(_c))) { - return true; - } - return false; -}; -is.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value); -is.infinite = (value) => value === Infinity || value === -Infinity; -const isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder; -is.evenInteger = isAbsoluteMod2(0); -is.oddInteger = isAbsoluteMod2(1); -is.emptyArray = (value) => is.array(value) && value.length === 0; -is.nonEmptyArray = (value) => is.array(value) && value.length > 0; -is.emptyString = (value) => is.string(value) && value.length === 0; -// TODO: Use `not ''` when the `not` operator is available. 
-is.nonEmptyString = (value) => is.string(value) && value.length > 0; -const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value); -is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value); -is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0; -// TODO: Use `not` operator here to remove `Map` and `Set` from type guard: -// - https://github.com/Microsoft/TypeScript/pull/29317 -is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0; -is.emptySet = (value) => is.set(value) && value.size === 0; -is.nonEmptySet = (value) => is.set(value) && value.size > 0; -is.emptyMap = (value) => is.map(value) && value.size === 0; -is.nonEmptyMap = (value) => is.map(value) && value.size > 0; -const predicateOnArray = (method, predicate, values) => { - if (!is.function_(predicate)) { - throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`); - } - if (values.length === 0) { - throw new TypeError('Invalid number of values'); - } - return method.call(values, predicate); -}; -is.any = (predicate, ...values) => { - const predicates = is.array(predicate) ? predicate : [predicate]; - return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values)); -}; -is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values); -const assertType = (condition, description, value) => { - if (!condition) { - throw new TypeError(`Expected value which is \`${description}\`, received value of type \`${is(value)}\`.`); - } -}; -exports.assert = { - // Unknowns. - undefined: (value) => assertType(is.undefined(value), 'undefined', value), - string: (value) => assertType(is.string(value), 'string', value), - number: (value) => assertType(is.number(value), 'number', value), - bigint: (value) => assertType(is.bigint(value), 'bigint', value), - // eslint-disable-next-line @typescript-eslint/ban-types - function_: (value) => assertType(is.function_(value), 'Function', value), - null_: (value) => assertType(is.null_(value), 'null', value), - class_: (value) => assertType(is.class_(value), "Class" /* class_ */, value), - boolean: (value) => assertType(is.boolean(value), 'boolean', value), - symbol: (value) => assertType(is.symbol(value), 'symbol', value), - numericString: (value) => assertType(is.numericString(value), "string with a number" /* numericString */, value), - array: (value, assertion) => { - const assert = assertType; - assert(is.array(value), 'Array', value); - if (assertion) { - value.forEach(assertion); - } - }, - buffer: (value) => assertType(is.buffer(value), 'Buffer', value), - nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* nullOrUndefined */, value), - object: (value) => assertType(is.object(value), 'Object', value), - iterable: (value) => assertType(is.iterable(value), "Iterable" /* iterable */, value), - asyncIterable: (value) => assertType(is.asyncIterable(value), "AsyncIterable" /* asyncIterable */, value), - generator: (value) => assertType(is.generator(value), 'Generator', value), - asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value), - nativePromise: (value) => assertType(is.nativePromise(value), "native Promise" /* nativePromise */, value), - promise: (value) => assertType(is.promise(value), 'Promise', value), - generatorFunction: (value) => 
assertType(is.generatorFunction(value), 'GeneratorFunction', value), - asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value), - // eslint-disable-next-line @typescript-eslint/ban-types - asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value), - // eslint-disable-next-line @typescript-eslint/ban-types - boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value), - regExp: (value) => assertType(is.regExp(value), 'RegExp', value), - date: (value) => assertType(is.date(value), 'Date', value), - error: (value) => assertType(is.error(value), 'Error', value), - map: (value) => assertType(is.map(value), 'Map', value), - set: (value) => assertType(is.set(value), 'Set', value), - weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value), - weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value), - int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value), - uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value), - uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value), - int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', value), - uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value), - int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value), - uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value), - float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value), - float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value), - bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value), - bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value), - arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value), - sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value), - dataView: (value) => assertType(is.dataView(value), 'DataView', value), - urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value), - urlString: (value) => assertType(is.urlString(value), "string with a URL" /* urlString */, value), - truthy: (value) => assertType(is.truthy(value), "truthy" /* truthy */, value), - falsy: (value) => assertType(is.falsy(value), "falsy" /* falsy */, value), - nan: (value) => assertType(is.nan(value), "NaN" /* nan */, value), - primitive: (value) => assertType(is.primitive(value), "primitive" /* primitive */, value), - integer: (value) => assertType(is.integer(value), "integer" /* integer */, value), - safeInteger: (value) => assertType(is.safeInteger(value), "integer" /* safeInteger */, value), - plainObject: (value) => assertType(is.plainObject(value), "plain object" /* plainObject */, value), - typedArray: (value) => assertType(is.typedArray(value), "TypedArray" /* typedArray */, value), - arrayLike: (value) => assertType(is.arrayLike(value), "array-like" /* arrayLike */, value), - domElement: (value) => assertType(is.domElement(value), "HTMLElement" /* domElement */, value), - observable: (value) => assertType(is.observable(value), 'Observable', value), - nodeStream: (value) => assertType(is.nodeStream(value), "Node.js Stream" /* nodeStream */, value), - infinite: (value) => assertType(is.infinite(value), "infinite number" /* infinite */, value), - emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* 
emptyArray */, value), - nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* nonEmptyArray */, value), - emptyString: (value) => assertType(is.emptyString(value), "empty string" /* emptyString */, value), - nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* nonEmptyString */, value), - emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* emptyStringOrWhitespace */, value), - emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* emptyObject */, value), - nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* nonEmptyObject */, value), - emptySet: (value) => assertType(is.emptySet(value), "empty set" /* emptySet */, value), - nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* nonEmptySet */, value), - emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* emptyMap */, value), - nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* nonEmptyMap */, value), - // Numbers. - evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* evenInteger */, value), - oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* oddInteger */, value), - // Two arguments. - directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* directInstanceOf */, instance), - inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* inRange */, value), - // Variadic functions. - any: (predicate, ...values) => assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* any */, values), - all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* all */, values) -}; -// Some few keywords are reserved, but we'll populate them for Node.js users -// See https://github.com/Microsoft/TypeScript/issues/2536 -Object.defineProperties(is, { - class: { - value: is.class_ - }, - function: { - value: is.function_ - }, - null: { - value: is.null_ - } -}); -Object.defineProperties(exports.assert, { - class: { - value: exports.assert.class_ - }, - function: { - value: exports.assert.function_ - }, - null: { - value: exports.assert.null_ - } -}); -exports.default = is; -// For CommonJS default export support -module.exports = is; -module.exports.default = is; -module.exports.assert = exports.assert; - - -/***/ }), - -/***/ 8097: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const defer_to_connect_1 = __nccwpck_require__(6214); -const nodejsMajorVersion = Number(process.versions.node.split('.')[0]); -const timer = (request) => { - const timings = { - start: Date.now(), - socket: undefined, - lookup: undefined, - connect: undefined, - secureConnect: undefined, - upload: undefined, - response: undefined, - end: undefined, - error: undefined, - abort: undefined, - phases: { - wait: undefined, - dns: undefined, - tcp: undefined, - tls: undefined, - request: undefined, - firstByte: undefined, - download: undefined, - total: undefined - } - }; - request.timings = timings; - const handleError = (origin) => { - const emit = origin.emit.bind(origin); - origin.emit = (event, ...args) => { - // Catches the `error` event - if (event === 'error') { - timings.error = Date.now(); - timings.phases.total = timings.error - timings.start; - origin.emit = emit; - 
} - // Saves the original behavior - return emit(event, ...args); - }; - }; - handleError(request); - request.prependOnceListener('abort', () => { - timings.abort = Date.now(); - // Let the `end` response event be responsible for setting the total phase, - // unless the Node.js major version is >= 13. - if (!timings.response || nodejsMajorVersion >= 13) { - timings.phases.total = Date.now() - timings.start; - } - }); - const onSocket = (socket) => { - timings.socket = Date.now(); - timings.phases.wait = timings.socket - timings.start; - const lookupListener = () => { - timings.lookup = Date.now(); - timings.phases.dns = timings.lookup - timings.socket; - }; - socket.prependOnceListener('lookup', lookupListener); - defer_to_connect_1.default(socket, { - connect: () => { - timings.connect = Date.now(); - if (timings.lookup === undefined) { - socket.removeListener('lookup', lookupListener); - timings.lookup = timings.connect; - timings.phases.dns = timings.lookup - timings.socket; - } - timings.phases.tcp = timings.connect - timings.lookup; - // This callback is called before flushing any data, - // so we don't need to set `timings.phases.request` here. - }, - secureConnect: () => { - timings.secureConnect = Date.now(); - timings.phases.tls = timings.secureConnect - timings.connect; - } - }); - }; - if (request.socket) { - onSocket(request.socket); - } - else { - request.prependOnceListener('socket', onSocket); - } - const onUpload = () => { - var _a; - timings.upload = Date.now(); - timings.phases.request = timings.upload - (_a = timings.secureConnect, (_a !== null && _a !== void 0 ? _a : timings.connect)); - }; - const writableFinished = () => { - if (typeof request.writableFinished === 'boolean') { - return request.writableFinished; - } - // Node.js doesn't have `request.writableFinished` property - return request.finished && request.outputSize === 0 && (!request.socket || request.socket.writableLength === 0); - }; - if (writableFinished()) { - onUpload(); - } - else { - request.prependOnceListener('finish', onUpload); - } - request.prependOnceListener('response', (response) => { - timings.response = Date.now(); - timings.phases.firstByte = timings.response - timings.upload; - response.timings = timings; - handleError(response); - response.prependOnceListener('end', () => { - timings.end = Date.now(); - timings.phases.download = timings.end - timings.response; - timings.phases.total = timings.end - timings.start; - }); - }); - return timings; -}; -exports.default = timer; -// For CommonJS default export support -module.exports = timer; -module.exports.default = timer; - - /***/ }), /***/ 6761: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Utils = __nccwpck_require__(5182); -var fs = Utils.FileSystem.require(), - pth = __nccwpck_require__(5622); +const Utils = __nccwpck_require__(5182); +const pth = __nccwpck_require__(1017); +const ZipEntry = __nccwpck_require__(4057); +const ZipFile = __nccwpck_require__(7744); -fs.existsSync = fs.existsSync || pth.existsSync; +const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); +const get_Str = (val, def) => (typeof val === "string" ? 
val : def); -var ZipEntry = __nccwpck_require__(4057), - ZipFile = __nccwpck_require__(7744); +const defaultOptions = { + // option "noSort" : if true it disables files sorting + noSort: false, + // read entries during load (initial loading may be slower) + readEntries: false, + // default method is none + method: Utils.Constants.NONE, + // file system + fs: null +}; -var isWin = /^win/.test(process.platform); +module.exports = function (/**String*/ input, /** object */ options) { + let inBuffer = null; -function canonical(p) { - var safeSuffix = pth.normalize(p).replace(/^(\.\.(\/|\\|$))+/, ''); - return pth.join('./', safeSuffix); -} + // create object based default options, allowing them to be overwritten + const opts = Object.assign(Object.create(null), defaultOptions); -module.exports = function (/**String*/input) { - var _zip = undefined, - _filename = ""; + // test input variable + if (input && "object" === typeof input) { + // if value is not buffer we accept it to be object with options + if (!(input instanceof Uint8Array)) { + Object.assign(opts, input); + input = opts.input ? opts.input : undefined; + if (opts.input) delete opts.input; + } - if (input && typeof input === "string") { // load zip file - if (fs.existsSync(input)) { - _filename = input; - _zip = new ZipFile(input, Utils.Constants.FILE); - } else { - throw new Error(Utils.Errors.INVALID_FILENAME); - } - } else if (input && Buffer.isBuffer(input)) { // load buffer - _zip = new ZipFile(input, Utils.Constants.BUFFER); - } else { // create new zip file - _zip = new ZipFile(null, Utils.Constants.NONE); - } - - function sanitize(prefix, name) { - prefix = pth.resolve(pth.normalize(prefix)); - var parts = name.split('/'); - for (var i = 0, l = parts.length; i < l; i++) { - var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); - if (path.indexOf(prefix) === 0) { - return path; - } - } - return pth.normalize(pth.join(prefix, pth.basename(name))); - } - - function getEntry(/**Object*/entry) { - if (entry && _zip) { - var item; - // If entry was given as a file name - if (typeof entry === "string") - item = _zip.getEntry(entry); - // if entry was given as a ZipEntry object - if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") - item = _zip.getEntry(entry.entryName); - - if (item) { - return item; - } - } - return null; - } - - function fixPath(zipPath){ - // convert windows file separators - zipPath = zipPath.split("\\").join("/"); - // add separator if it wasnt given - if (zipPath.charAt(zipPath.length - 1) !== "/") { - zipPath += "/"; - } - return zipPath; + // if input is buffer + if (Buffer.isBuffer(input)) { + inBuffer = input; + opts.method = Utils.Constants.BUFFER; + input = undefined; + } } - return { - /** - * Extracts the given entry from the archive and returns the content as a Buffer object - * @param entry ZipEntry object or String with the full path of the entry - * - * @return Buffer or Null in case of error - */ - readFile: function (/**Object*/entry, /*String, Buffer*/pass) { - var item = getEntry(entry); - return item && item.getData(pass) || null; - }, + // assign options + Object.assign(opts, options); - /** - * Asynchronous readFile - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * - * @return Buffer or Null in case of error - */ - readFileAsync: function (/**Object*/entry, /**Function*/callback) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(callback); - } 
else { - callback(null, "getEntry failed for:" + entry) - } - }, + // instanciate utils filesystem + const filetools = new Utils(opts); - /** - * Extracts the given entry from the archive and returns the content as plain text in the given encoding - * @param entry ZipEntry object or String with the full path of the entry - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsText: function (/**Object*/entry, /**String=*/encoding) { - var item = getEntry(entry); - if (item) { - var data = item.getData(); - if (data && data.length) { - return data.toString(encoding || "utf8"); - } - } - return ""; - }, + // if input is file name we retrieve its content + if (input && "string" === typeof input) { + // load zip file + if (filetools.fs.existsSync(input)) { + opts.method = Utils.Constants.FILE; + opts.filename = input; + inBuffer = filetools.fs.readFileSync(input); + } else { + throw new Error(Utils.Errors.INVALID_FILENAME); + } + } - /** - * Asynchronous readAsText - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(function (data, err) { - if (err) { - callback(data, err); - return; - } + // create variable + const _zip = new ZipFile(inBuffer, opts); - if (data && data.length) { - callback(data.toString(encoding || "utf8")); - } else { - callback(""); - } - }) - } else { - callback(""); - } - }, + const { canonical, sanitize } = Utils; - /** - * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory - * - * @param entry - */ - deleteFile: function (/**Object*/entry) { // @TODO: test deleteFile - var item = getEntry(entry); - if (item) { - _zip.deleteEntry(item.entryName); - } - }, + function getEntry(/**Object*/ entry) { + if (entry && _zip) { + var item; + // If entry was given as a file name + if (typeof entry === "string") item = _zip.getEntry(entry); + // if entry was given as a ZipEntry object + if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); - /** - * Adds a comment to the zip. The zip must be rewritten after adding the comment. - * - * @param comment - */ - addZipComment: function (/**String*/comment) { // @TODO: test addZipComment - _zip.comment = comment; - }, + if (item) { + return item; + } + } + return null; + } - /** - * Returns the zip comment - * - * @return String - */ - getZipComment: function () { - return _zip.comment || ''; - }, + function fixPath(zipPath) { + const { join, normalize, sep } = pth.posix; + // convert windows file separators and normalize + return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); + } - /** - * Adds a comment to a specified zipEntry. 
The zip must be rewritten after adding the comment - * The comment cannot exceed 65535 characters in length - * - * @param entry - * @param comment - */ - addZipEntryComment: function (/**Object*/entry, /**String*/comment) { - var item = getEntry(entry); - if (item) { - item.comment = comment; - } - }, + return { + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param entry ZipEntry object or String with the full path of the entry + * + * @return Buffer or Null in case of error + */ + readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { + var item = getEntry(entry); + return (item && item.getData(pass)) || null; + }, - /** - * Returns the comment of the specified entry - * - * @param entry - * @return String - */ - getZipEntryComment: function (/**Object*/entry) { - var item = getEntry(entry); - if (item) { - return item.comment || ''; - } - return '' - }, + /** + * Asynchronous readFile + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * + * @return Buffer or Null in case of error + */ + readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(callback); + } else { + callback(null, "getEntry failed for:" + entry); + } + }, - /** - * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content - * - * @param entry - * @param content - */ - updateFile: function (/**Object*/entry, /**Buffer*/content) { - var item = getEntry(entry); - if (item) { - item.setData(content); - } - }, + /** + * Extracts the given entry from the archive and returns the content as plain text in the given encoding + * @param entry ZipEntry object or String with the full path of the entry + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsText: function (/**Object*/ entry, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + var data = item.getData(); + if (data && data.length) { + return data.toString(encoding || "utf8"); + } + } + return ""; + }, - /** - * Adds a file from the disk to the archive - * - * @param localPath File to add to zip - * @param zipPath Optional path inside the zip - * @param zipName Optional name for the file - */ - addLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) { - if (fs.existsSync(localPath)) { - // fix ZipPath - zipPath = (zipPath) ? fixPath(zipPath) : ""; + /** + * Asynchronous readAsText + * @param entry ZipEntry object or String with the full path of the entry + * @param callback + * @param encoding Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(function (data, err) { + if (err) { + callback(data, err); + return; + } - // p - local file name - var p = localPath.split("\\").join("/").split("/").pop(); + if (data && data.length) { + callback(data.toString(encoding || "utf8")); + } else { + callback(""); + } + }); + } else { + callback(""); + } + }, - // add file name into zippath - zipPath += (zipName) ? 
zipName : p; + /** + * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory + * + * @param entry + */ + deleteFile: function (/**Object*/ entry) { + // @TODO: test deleteFile + var item = getEntry(entry); + if (item) { + _zip.deleteEntry(item.entryName); + } + }, - // read file attributes - const _attr = fs.statSync(localPath); + /** + * Adds a comment to the zip. The zip must be rewritten after adding the comment. + * + * @param comment + */ + addZipComment: function (/**String*/ comment) { + // @TODO: test addZipComment + _zip.comment = comment; + }, - // add file into zip file - this.addFile(zipPath, fs.readFileSync(localPath), comment, _attr) - } else { - throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } - }, + /** + * Returns the zip comment + * + * @return String + */ + getZipComment: function () { + return _zip.comment || ""; + }, - /** - * Adds a local directory and all its nested files and directories to the archive - * - * @param localPath - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - */ - addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) { + /** + * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment + * The comment cannot exceed 65535 characters in length + * + * @param entry + * @param comment + */ + addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { + var item = getEntry(entry); + if (item) { + item.comment = comment; + } + }, + + /** + * Returns the comment of the specified entry + * + * @param entry + * @return String + */ + getZipEntryComment: function (/**Object*/ entry) { + var item = getEntry(entry); + if (item) { + return item.comment || ""; + } + return ""; + }, + + /** + * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content + * + * @param entry + * @param content + */ + updateFile: function (/**Object*/ entry, /**Buffer*/ content) { + var item = getEntry(entry); + if (item) { + item.setData(content); + } + }, + + /** + * Adds a file from the disk to the archive + * + * @param localPath File to add to zip + * @param zipPath Optional path inside the zip + * @param zipName Optional name for the file + */ + addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { + if (filetools.fs.existsSync(localPath)) { + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + + // p - local file name + var p = localPath.split("\\").join("/").split("/").pop(); + + // add file name into zippath + zipPath += zipName ? zipName : p; + + // read file attributes + const _attr = filetools.fs.statSync(localPath); + + // add file into zip file + this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); + } else { + throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } + }, + + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param localPath + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. 
+ */ + addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter) { // Prepare filter - if (filter instanceof RegExp) { // if filter is RegExp wrap it - filter = (function (rx){ + if (filter instanceof RegExp) { + // if filter is RegExp wrap it + filter = (function (rx) { return function (filename) { return rx.test(filename); - } + }; })(filter); - } else if ('function' !== typeof filter) { // if filter is not function we will replace it + } else if ("function" !== typeof filter) { + // if filter is not function we will replace it filter = function () { return true; }; } // fix ZipPath - zipPath = (zipPath) ? fixPath(zipPath) : ""; + zipPath = zipPath ? fixPath(zipPath) : ""; // normalize the path first localPath = pth.normalize(localPath); - if (fs.existsSync(localPath)) { - - var items = Utils.findFiles(localPath), - self = this; + if (filetools.fs.existsSync(localPath)) { + const items = filetools.findFiles(localPath); + const self = this; if (items.length) { items.forEach(function (filepath) { var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix if (filter(p)) { - if (filepath.charAt(filepath.length - 1) !== pth.sep) { - self.addFile(zipPath + p, fs.readFileSync(filepath), "", fs.statSync(filepath)); + var stats = filetools.fs.statSync(filepath); + if (stats.isFile()) { + self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", stats); } else { - self.addFile(zipPath + p + '/', Buffer.alloc(0), "", 0); + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); } } }); @@ -8586,376 +9084,475 @@ module.exports = function (/**String*/input) { } }, - /** - * Asynchronous addLocalFile - * @param localPath - * @param callback - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - */ - addLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) { - if (filter === undefined) { - filter = function () { - return true; - }; - } else if (filter instanceof RegExp) { - filter = function (filter) { - return function (filename) { - return filter.test(filename); - } - }(filter); - } + /** + * Asynchronous addLocalFile + * @param localPath + * @param callback + * @param zipPath optional path inside zip + * @param filter optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { + if (filter instanceof RegExp) { + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + filter = function () { + return true; + }; + } - if (zipPath) { - zipPath = zipPath.split("\\").join("/"); - if (zipPath.charAt(zipPath.length - 1) !== "/") { - zipPath += "/"; - } - } else { - zipPath = ""; - } - // normalize the path first - localPath = pth.normalize(localPath); - localPath = localPath.split("\\").join("/"); //windows fix - if (localPath.charAt(localPath.length - 1) !== "/") - localPath += "/"; + // fix ZipPath + zipPath = zipPath ? 
fixPath(zipPath) : ""; - var self = this; - fs.open(localPath, 'r', function (err, fd) { - if (err && err.code === 'ENOENT') { - callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } else if (err) { - callback(undefined, err); - } else { - var items = Utils.findFiles(localPath); - var i = -1; + // normalize the path first + localPath = pth.normalize(localPath); - var next = function () { - i += 1; - if (i < items.length) { - var p = items[i].split("\\").join("/").replace(new RegExp(localPath.replace(/(\(|\))/g, '\\$1'), 'i'), ""); //windows fix - p = p.normalize('NFD').replace(/[\u0300-\u036f]/g, '').replace(/[^\x20-\x7E]/g, '') // accent fix - if (filter(p)) { - if (p.charAt(p.length - 1) !== "/") { - fs.readFile(items[i], function (err, data) { - if (err) { - callback(undefined, err); - } else { - self.addFile(zipPath + p, data, '', 0); - next(); - } - }) - } else { - self.addFile(zipPath + p, Buffer.alloc(0), "", 0); - next(); - } - } else { - next(); - } + var self = this; + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { + callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); + } else if (err) { + callback(undefined, err); + } else { + var items = filetools.findFiles(localPath); + var i = -1; - } else { - callback(true, undefined); - } - } + var next = function () { + i += 1; + if (i < items.length) { + var filepath = items[i]; + var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix + p = p + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix + if (filter(p)) { + filetools.fs.stat(filepath, function (er0, stats) { + if (er0) callback(undefined, er0); + if (stats.isFile()) { + filetools.fs.readFile(filepath, function (er1, data) { + if (er1) { + callback(undefined, er1); + } else { + self.addFile(zipPath + p, data, "", stats); + next(); + } + }); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); + next(); + } + }); + } else { + next(); + } + } else { + callback(true, undefined); + } + }; - next(); - } - }); - }, + next(); + } + }); + }, - /** - * Allows you to create a entry (file or directory) in the zip file. - * If you want to create a directory the entryName must end in / and a null buffer should be provided. - * Comment and attributes are optional - * - * @param entryName - * @param content - * @param comment - * @param attr - */ - addFile: function (/**String*/entryName, /**Buffer*/content, /**String*/comment, /**Number*/attr) { - // prepare new entry - var entry = new ZipEntry(); - entry.entryName = entryName; - entry.comment = comment || ""; + /** + * + * @param {string} localPath - path where files will be extracted + * @param {object} props - optional properties + * @param {string} props.zipPath - optional path inside zip + * @param {regexp, function} props.filter - RegExp or Function if files match will be included. + */ + addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { + return new Promise((resolve, reject) => { + const { filter, zipPath } = Object.assign({}, props); + this.addLocalFolderAsync( + localPath, + (done, err) => { + if (err) reject(err); + if (done) resolve(this); + }, + zipPath, + filter + ); + }); + }, - var isStat = ('object' === typeof attr) && (attr instanceof fs.Stats); + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the entryName must end in / and a null buffer should be provided. 
+ * Comment and attributes are optional + * + * @param {string} entryName + * @param {Buffer | string} content - file content as buffer or utf8 coded string + * @param {string} comment - file comment + * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object + */ + addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { + let entry = getEntry(entryName); + const update = entry != null; - // last modification time from file stats - if (isStat){ - entry.header.time = attr.mtime; - } + // prepare new entry + if (!update) { + entry = new ZipEntry(); + entry.entryName = entryName; + } + entry.comment = comment || ""; - // Set file attribute - var fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag) + const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; - // extended attributes field for Unix - if('win32' !== process.platform){ - // set file type either S_IFDIR / S_IFREG - var unix = (entry.isDirectory) ? 0x4000 : 0x8000; + // last modification time from file stats + if (isStat) { + entry.header.time = attr.mtime; + } - if (isStat) { // File attributes from file stats - unix |= (0xfff & attr.mode) - }else if ('number' === typeof attr){ // attr from given attr values - unix |= (0xfff & attr); - }else{ // Default values: - unix |= (entry.isDirectory) ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) - } + // Set file attribute + var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) - fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes - } + // extended attributes field for Unix + if (!Utils.isWin) { + // set file type either S_IFDIR / S_IFREG + let unix = entry.isDirectory ? 0x4000 : 0x8000; - entry.attr = fileattr; + if (isStat) { + // File attributes from file stats + unix |= 0xfff & attr.mode; + } else if ("number" === typeof attr) { + // attr from given attr values + unix |= 0xfff & attr; + } else { + // Default values: + unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) + } - entry.setData(content); - _zip.setEntry(entry); - }, + fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes + } - /** - * Returns an array of ZipEntry objects representing the files and folders inside the archive - * - * @return Array - */ - getEntries: function () { - if (_zip) { - return _zip.entries; - } else { - return []; - } - }, + entry.attr = fileattr; - /** - * Returns a ZipEntry object representing the file or folder specified by ``name``. - * - * @param name - * @return ZipEntry - */ - getEntry: function (/**String*/name) { - return getEntry(name); - }, + entry.setData(content); + if (!update) _zip.setEntry(entry); + }, - getEntryCount: function() { - return _zip.getEntryCount(); - }, + /** + * Returns an array of ZipEntry objects representing the files and folders inside the archive + * + * @return Array + */ + getEntries: function () { + return _zip ? _zip.entries : []; + }, - forEach: function(callback) { - return _zip.forEach(callback); - }, + /** + * Returns a ZipEntry object representing the file or folder specified by ``name``. 
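For reference, a sketch of the attribute packing that addFile performs in the hunk above; packAttributes is an illustrative name, and the constants are taken from the diff.

```js
// Low byte: MS-DOS directory flag. High 16 bits: Unix file type (S_IFDIR or
// S_IFREG) plus permission bits, falling back to the 0755/0644 defaults.
function packAttributes(isDirectory, mode /* e.g. fs.Stats#mode, optional */) {
  let fileattr = isDirectory ? 0x10 : 0;      // MS-DOS directory flag
  let unix = isDirectory ? 0x4000 : 0x8000;   // S_IFDIR / S_IFREG
  unix |= typeof mode === "number"
    ? mode & 0xfff                            // permissions from stats or caller
    : isDirectory ? 0o755 : 0o644;            // library defaults
  return (fileattr | (unix << 16)) >>> 0;     // Unix bits live in the high word
}

console.log(packAttributes(false, 0o644).toString(16)); // "81a40000"
```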
+ * + * @param name + * @return ZipEntry + */ + getEntry: function (/**String*/ name) { + return getEntry(name); + }, - /** - * Extracts the given entry to the given targetPath - * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted - * - * @param entry ZipEntry object or String with the full path of the entry - * @param targetPath Target folder where to write the file - * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder - * will be created in targetPath as well. Default is TRUE - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE + getEntryCount: function () { + return _zip.getEntryCount(); + }, + + forEach: function (callback) { + return _zip.forEach(callback); + }, + + /** + * Extracts the given entry to the given targetPath + * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted + * + * @param entry ZipEntry object or String with the full path of the entry + * @param targetPath Target folder where to write the file + * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder + * will be created in targetPath as well. Default is TRUE + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) - * - * @return Boolean - */ - extractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) { - overwrite = overwrite || false; - maintainEntryPath = typeof maintainEntryPath === "undefined" ? true : maintainEntryPath; + * + * @return Boolean + */ + extractEntryTo: function ( + /**Object*/ entry, + /**String*/ targetPath, + /**Boolean*/ maintainEntryPath, + /**Boolean*/ overwrite, + /**Boolean*/ keepOriginalPermission, + /**String**/ outFileName + ) { + overwrite = get_Bool(overwrite, false); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + maintainEntryPath = get_Bool(maintainEntryPath, true); + outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); - var item = getEntry(entry); - if (!item) { - throw new Error(Utils.Errors.NO_ENTRY); - } + var item = getEntry(entry); + if (!item) { + throw new Error(Utils.Errors.NO_ENTRY); + } - var entryName = canonical(item.entryName); + var entryName = canonical(item.entryName); - var target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? entryName : pth.basename(entryName))); + var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); - if (item.isDirectory) { - target = pth.resolve(target, ".."); - var children = _zip.getEntryChildren(item); - children.forEach(function (child) { - if (child.isDirectory) return; - var content = child.getData(); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - var name = canonical(child.entryName) - var childName = sanitize(targetPath, maintainEntryPath ? 
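A hedged usage sketch for the extended extractEntryTo signature introduced here (the bundled library is adm-zip; the example assumes the standalone package and uses illustrative file names):

```js
const AdmZip = require("adm-zip");

const zip = new AdmZip("reports.zip");
// Extract one entry, flatten its folder structure, rename it, overwrite any
// existing file, and keep the permissions recorded in the archive.
zip.extractEntryTo(
  "nested/dir/report.xml", // entry
  "./out",                 // targetPath
  false,                   // maintainEntryPath
  true,                    // overwrite
  true,                    // keepOriginalPermission (new in this diff)
  "report-copy.xml"        // outFileName
);
```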
name : pth.basename(name)); + if (item.isDirectory) { + var children = _zip.getEntryChildren(item); + children.forEach(function (child) { + if (child.isDirectory) return; + var content = child.getData(); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + var name = canonical(child.entryName); + var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; + filetools.writeFileTo(childName, content, overwrite, fileAttr); + }); + return true; + } - Utils.writeFileTo(childName, content, overwrite); - }); - return true; - } + var content = item.getData(); + if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - var content = item.getData(); - if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + if (filetools.fs.existsSync(target) && !overwrite) { + throw new Error(Utils.Errors.CANT_OVERRIDE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + filetools.writeFileTo(target, content, overwrite, fileAttr); - if (fs.existsSync(target) && !overwrite) { - throw new Error(Utils.Errors.CANT_OVERRIDE); - } - Utils.writeFileTo(target, content, overwrite); + return true; + }, - return true; - }, + /** + * Test the archive + * + */ + test: function (pass) { + if (!_zip) { + return false; + } - /** - * Test the archive - * - */ - test: function (pass) { - if (!_zip) { - return false; - } + for (var entry in _zip.entries) { + try { + if (entry.isDirectory) { + continue; + } + var content = _zip.entries[entry].getData(pass); + if (!content) { + return false; + } + } catch (err) { + return false; + } + } + return true; + }, - for (var entry in _zip.entries) { - try { - if (entry.isDirectory) { - continue; - } - var content = _zip.entries[entry].getData(pass); - if (!content) { - return false; - } - } catch (err) { - return false; - } - } - return true; - }, + /** + * Extracts the entire archive to the given location + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + */ + extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { + overwrite = get_Bool(overwrite, false); + pass = get_Str(keepOriginalPermission, pass); + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + throw new Error(Utils.Errors.NO_ZIP); + } + _zip.entries.forEach(function (entry) { + var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); + if (entry.isDirectory) { + filetools.makeDir(entryName); + return; + } + var content = entry.getData(pass); + if (!content) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
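Usage sketch for test() and the updated extractAllTo(targetPath, overwrite, keepOriginalPermission, pass) shown above; the archive name and password are illustrative.

```js
const AdmZip = require("adm-zip");

const zip = new AdmZip("backup.zip");

// test() tries to inflate every entry (optionally with a password) and
// returns false on the first failure.
if (!zip.test("s3cret")) {
  throw new Error("archive is corrupt or the password is wrong");
}

// Unpack everything, overwriting existing files and restoring the Unix
// permissions stored in each entry header.
zip.extractAllTo("./restore", true, /*keepOriginalPermission*/ true, "s3cret");
```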
entry.header.fileAttr : undefined; + filetools.writeFileTo(entryName, content, overwrite, fileAttr); + try { + filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); + } catch (err) { + throw new Error(Utils.Errors.CANT_EXTRACT_FILE); + } + }); + }, - /** - * Extracts the entire archive to the given location - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE - */ - extractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) { - overwrite = overwrite || false; - if (!_zip) { - throw new Error(Utils.Errors.NO_ZIP); - } - _zip.entries.forEach(function (entry) { - var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); - if (entry.isDirectory) { - Utils.makeDir(entryName); - return; - } - var content = entry.getData(pass); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - Utils.writeFileTo(entryName, content, overwrite); - try { - fs.utimesSync(entryName, entry.header.time, entry.header.time) - } catch (err) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - }) - }, + /** + * Asynchronous extractAllTo + * + * @param targetPath Target location + * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. + */ + extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { + if (!callback) { + callback = function () {}; + } + overwrite = get_Bool(overwrite, false); + if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; + keepOriginalPermission = get_Bool(keepOriginalPermission, false); + if (!_zip) { + callback(new Error(Utils.Errors.NO_ZIP)); + return; + } - /** - * Asynchronous extractAllTo - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. 
- * Default is FALSE - * @param callback - */ - extractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) { - if (!callback) { - callback = function() {} - } - overwrite = overwrite || false; - if (!_zip) { - callback(new Error(Utils.Errors.NO_ZIP)); - return; - } + targetPath = pth.resolve(targetPath); + // convert entryName to + const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); + const getError = (msg, file) => new Error(msg + ': "' + file + '"'); - var entries = _zip.entries; - var i = entries.length; - entries.forEach(function (entry) { - if (i <= 0) return; // Had an error already + // separate directories from files + const dirEntries = []; + const fileEntries = new Set(); + _zip.entries.forEach((e) => { + if (e.isDirectory) { + dirEntries.push(e); + } else { + fileEntries.add(e); + } + }); - var entryName = pth.normalize(canonical(entry.entryName.toString())); + // Create directory entries first synchronously + // this prevents race condition and assures folders are there before writing files + for (const entry of dirEntries) { + const dirPath = getPath(entry); + // The reverse operation for attr depend on method addFile() + const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + try { + filetools.makeDir(dirPath); + if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); + // in unix timestamp will change if files are later added to folder, but still + filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); + } catch (er) { + callback(getError("Unable to create folder", dirPath)); + } + } - if (entry.isDirectory) { - Utils.makeDir(sanitize(targetPath, entryName)); - if (--i === 0) - callback(undefined); - return; - } - entry.getDataAsync(function (content, err) { - if (i <= 0) return; - if (err) { - callback(new Error(err)); - return; - } - if (!content) { - i = 0; - callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); - return; - } + // callback wrapper, for some house keeping + const done = () => { + if (fileEntries.size === 0) { + callback(); + } + }; - Utils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, function (succ) { - try { - fs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time); - } catch (err) { - callback(new Error('Unable to set utimes')); - } - if (i <= 0) return; - if (!succ) { - i = 0; - callback(new Error('Unable to write')); - return; - } - if (--i === 0) - callback(undefined); - }); - }); - }) - }, + // Extract file entries asynchronously + for (const entry of fileEntries.values()) { + const entryName = pth.normalize(canonical(entry.entryName.toString())); + const filePath = sanitize(targetPath, entryName); + entry.getDataAsync(function (content, err_1) { + if (err_1) { + callback(new Error(err_1)); + return; + } + if (!content) { + callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); + } else { + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
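The async extractor now creates all directory entries synchronously first, then writes file entries in parallel and tracks completion in a Set. A usage sketch with the new signature (paths are illustrative):

```js
const AdmZip = require("adm-zip");

const zip = new AdmZip("bundle.zip");
zip.extractAllToAsync("./out", /*overwrite*/ true, /*keepOriginalPermission*/ false, (err) => {
  if (err) {
    // may be invoked for each entry that fails; the first error usually matters most
    console.error("extraction failed:", err.message);
    return;
  }
  console.log("all entries extracted");
});
```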
entry.header.fileAttr : undefined; + filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { + if (!succ) { + callback(getError("Unable to write file", filePath)); + return; + } + filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { + if (err_2) { + callback(getError("Unable to set times", filePath)); + return; + } + fileEntries.delete(entry); + // call the callback if it was last entry + done(); + }); + }); + } + }); + } + // call the callback if fileEntries was empty + done(); + }, - /** - * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip - * - * @param targetFileName - * @param callback - */ - writeZip: function (/**String*/targetFileName, /**Function*/callback) { - if (arguments.length === 1) { - if (typeof targetFileName === "function") { - callback = targetFileName; - targetFileName = ""; - } - } + /** + * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip + * + * @param targetFileName + * @param callback + */ + writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { + if (arguments.length === 1) { + if (typeof targetFileName === "function") { + callback = targetFileName; + targetFileName = ""; + } + } - if (!targetFileName && _filename) { - targetFileName = _filename; - } - if (!targetFileName) return; + if (!targetFileName && opts.filename) { + targetFileName = opts.filename; + } + if (!targetFileName) return; - var zipData = _zip.compressToBuffer(); - if (zipData) { - var ok = Utils.writeFileTo(targetFileName, zipData, true); - if (typeof callback === 'function') callback(!ok ? new Error("failed") : null, ""); - } - }, + var zipData = _zip.compressToBuffer(); + if (zipData) { + var ok = filetools.writeFileTo(targetFileName, zipData, true); + if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); + } + }, - /** - * Returns the content of the entire zip file as a Buffer object - * - * @return Buffer - */ - toBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) { - this.valueOf = 2; - if (typeof onSuccess === "function") { - _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); - return null; - } - return _zip.compressToBuffer() - } - } + writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { + const { overwrite, perm } = Object.assign({ overwrite: true }, props); + + return new Promise((resolve, reject) => { + // find file name + if (!targetFileName && opts.filename) targetFileName = opts.filename; + if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); + + this.toBufferPromise().then((zipData) => { + const ret = (done) => (done ? 
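Usage sketch for the promise-based writers added above: writeZipPromise takes an optional { overwrite, perm } object and toBufferPromise resolves with the archive as a Buffer. File names here are illustrative.

```js
const AdmZip = require("adm-zip");

async function buildArchive() {
  const zip = new AdmZip();
  zip.addFile("hello.txt", Buffer.from("hello world"), "a comment");

  const buf = await zip.toBufferPromise();   // in-memory archive
  console.log(`zip is ${buf.length} bytes`);

  await zip.writeZipPromise("./hello.zip", { overwrite: true });
}

buildArchive().catch(console.error);
```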
resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); + filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); + }, reject); + }); + }, + + toBufferPromise: function () { + return new Promise((resolve, reject) => { + _zip.toAsyncBuffer(resolve, reject); + }); + }, + + /** + * Returns the content of the entire zip file as a Buffer object + * + * @return Buffer + */ + toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { + this.valueOf = 2; + if (typeof onSuccess === "function") { + _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); + return null; + } + return _zip.compressToBuffer(); + } + }; }; @@ -8969,8 +9566,8 @@ var Utils = __nccwpck_require__(5182), /* The central directory file header */ module.exports = function () { - var _verMade = 0x14, - _version = 0x0A, + var _verMade = 20, // v2.0 + _version = 10, // v1.0 _flags = 0, _method = 0, _time = 0, @@ -8979,51 +9576,61 @@ module.exports = function () { _size = 0, _fnameLen = 0, _extraLen = 0, - _comLen = 0, _diskStart = 0, _inattr = 0, _attr = 0, _offset = 0; - switch(process.platform){ - case 'win32': - _verMade |= 0x0A00; - case 'darwin': - _verMade |= 0x1300; - default: - _verMade |= 0x0300; - } + _verMade |= Utils.isWin ? 0x0a00 : 0x0300; + + // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. + // Without it file names may be corrupted for other apps when file names use unicode chars + _flags |= Constants.FLG_EFS; var _dataHeader = {}; function setTime(val) { val = new Date(val); - _time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980 - | (val.getMonth() + 1) << 21 // b05-08 month - | val.getDate() << 16 // b00-04 hour - + _time = + (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 + ((val.getMonth() + 1) << 21) | // b05-08 month + (val.getDate() << 16) | // b00-04 hour // 2 bytes time - | val.getHours() << 11 // b11-15 hour - | val.getMinutes() << 5 // b05-10 minute - | val.getSeconds() >> 1; // b00-04 seconds divided by 2 + (val.getHours() << 11) | // b11-15 hour + (val.getMinutes() << 5) | // b05-10 minute + (val.getSeconds() >> 1); // b00-04 seconds divided by 2 } setTime(+new Date()); return { - get made () { return _verMade; }, - set made (val) { _verMade = val; }, + get made() { + return _verMade; + }, + set made(val) { + _verMade = val; + }, - get version () { return _version; }, - set version (val) { _version = val }, + get version() { + return _version; + }, + set version(val) { + _version = val; + }, - get flags () { return _flags }, - set flags (val) { _flags = val; }, + get flags() { + return _flags; + }, + set flags(val) { + _flags = val; + }, - get method () { return _method; }, - set method (val) { - switch (val){ + get method() { + return _method; + }, + set method(val) { + switch (val) { case Constants.STORED: this.version = 10; case Constants.DEFLATED: @@ -9031,66 +9638,107 @@ module.exports = function () { this.version = 20; } _method = val; - }, - - get time () { return new Date( - ((_time >> 25) & 0x7f) + 1980, - ((_time >> 21) & 0x0f) - 1, - (_time >> 16) & 0x1f, - (_time >> 11) & 0x1f, - (_time >> 5) & 0x3f, - (_time & 0x1f) << 1 - ); }, - set time (val) { + + get time() { + return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); + }, + set time(val) { setTime(val); }, - get crc () { return _crc; }, - set crc 
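A standalone sketch of the 32-bit MS-DOS date/time packing that setTime performs in the header code above (packDosTime is an illustrative name):

```js
// High 16 bits: years since 1980 (7 bits), month, day of month.
// Low 16 bits: hours, minutes, seconds divided by two.
function packDosTime(value) {
  const d = new Date(value);
  return (
    ((((d.getFullYear() - 1980) & 0x7f) << 25) |
      ((d.getMonth() + 1) << 21) |
      (d.getDate() << 16) |
      (d.getHours() << 11) |
      (d.getMinutes() << 5) |
      (d.getSeconds() >> 1)) >>> 0
  );
}

console.log(packDosTime("2021-06-15T12:34:56").toString(16));
```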
(val) { _crc = val; }, + get crc() { + return _crc; + }, + set crc(val) { + _crc = Math.max(0, val) >>> 0; + }, - get compressedSize () { return _compressedSize; }, - set compressedSize (val) { _compressedSize = val; }, + get compressedSize() { + return _compressedSize; + }, + set compressedSize(val) { + _compressedSize = Math.max(0, val) >>> 0; + }, - get size () { return _size; }, - set size (val) { _size = val; }, + get size() { + return _size; + }, + set size(val) { + _size = Math.max(0, val) >>> 0; + }, - get fileNameLength () { return _fnameLen; }, - set fileNameLength (val) { _fnameLen = val; }, + get fileNameLength() { + return _fnameLen; + }, + set fileNameLength(val) { + _fnameLen = val; + }, - get extraLength () { return _extraLen }, - set extraLength (val) { _extraLen = val; }, + get extraLength() { + return _extraLen; + }, + set extraLength(val) { + _extraLen = val; + }, - get commentLength () { return _comLen }, - set commentLength (val) { _comLen = val }, + get commentLength() { + return _comLen; + }, + set commentLength(val) { + _comLen = val; + }, - get diskNumStart () { return _diskStart }, - set diskNumStart (val) { _diskStart = val }, + get diskNumStart() { + return _diskStart; + }, + set diskNumStart(val) { + _diskStart = Math.max(0, val) >>> 0; + }, - get inAttr () { return _inattr }, - set inAttr (val) { _inattr = val }, + get inAttr() { + return _inattr; + }, + set inAttr(val) { + _inattr = Math.max(0, val) >>> 0; + }, - get attr () { return _attr }, - set attr (val) { _attr = val }, + get attr() { + return _attr; + }, + set attr(val) { + _attr = Math.max(0, val) >>> 0; + }, - get offset () { return _offset }, - set offset (val) { _offset = val }, + // get Unix file permissions + get fileAttr() { + return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0; + }, - get encripted () { return (_flags & 1) === 1 }, + get offset() { + return _offset; + }, + set offset(val) { + _offset = Math.max(0, val) >>> 0; + }, - get entryHeaderSize () { + get encripted() { + return (_flags & 1) === 1; + }, + + get entryHeaderSize() { return Constants.CENHDR + _fnameLen + _extraLen + _comLen; }, - get realDataOffset () { + get realDataOffset() { return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; }, - get dataHeader () { + get dataHeader() { return _dataHeader; }, - loadDataHeaderFromBinary : function(/*Buffer*/input) { + loadDataHeaderFromBinary: function (/*Buffer*/ input) { var data = input.slice(_offset, _offset + Constants.LOCHDR); // 30 bytes and should start with "PK\003\004" if (data.readUInt32LE(0) !== Constants.LOCSIG) { @@ -9098,27 +9746,27 @@ module.exports = function () { } _dataHeader = { // version needed to extract - version : data.readUInt16LE(Constants.LOCVER), + version: data.readUInt16LE(Constants.LOCVER), // general purpose bit flag - flags : data.readUInt16LE(Constants.LOCFLG), + flags: data.readUInt16LE(Constants.LOCFLG), // compression method - method : data.readUInt16LE(Constants.LOCHOW), + method: data.readUInt16LE(Constants.LOCHOW), // modification time (2 bytes time, 2 bytes date) - time : data.readUInt32LE(Constants.LOCTIM), + time: data.readUInt32LE(Constants.LOCTIM), // uncompressed file crc-32 value - crc : data.readUInt32LE(Constants.LOCCRC), + crc: data.readUInt32LE(Constants.LOCCRC), // compressed size - compressedSize : data.readUInt32LE(Constants.LOCSIZ), + compressedSize: data.readUInt32LE(Constants.LOCSIZ), // uncompressed size - size : data.readUInt32LE(Constants.LOCLEN), + size: data.readUInt32LE(Constants.LOCLEN), // 
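The new fileAttr getter is the inverse of addFile's packing: shift the external attribute down 16 bits and mask the permission bits. A sketch (unpackPermissions is an illustrative name):

```js
function unpackPermissions(attr) {
  // same expression as the fileAttr getter above
  return attr ? (((attr >>> 0) | 0) >> 16) & 0xfff : 0;
}

// 0x81a40000 is a regular file (S_IFREG) with mode 0644 as packed by addFile
console.log(unpackPermissions(0x81a40000).toString(8)); // "644"
```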
filename length - fnameLen : data.readUInt16LE(Constants.LOCNAM), + fnameLen: data.readUInt16LE(Constants.LOCNAM), // extra field length - extraLen : data.readUInt16LE(Constants.LOCEXT) - } + extraLen: data.readUInt16LE(Constants.LOCEXT) + }; }, - loadFromBinary : function(/*Buffer*/data) { + loadFromBinary: function (/*Buffer*/ data) { // data should be 46 bytes and start with "PK 01 02" if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { throw new Error(Utils.Errors.INVALID_CEN); @@ -9155,7 +9803,7 @@ module.exports = function () { _offset = data.readUInt32LE(Constants.CENOFF); }, - dataHeaderToBinary : function() { + dataHeaderToBinary: function () { // LOC header size (30 bytes) var data = Buffer.alloc(Constants.LOCHDR); // "PK\003\004" @@ -9181,7 +9829,7 @@ module.exports = function () { return data; }, - entryHeaderToBinary : function() { + entryHeaderToBinary: function () { // CEN header size (46 bytes) var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); // "PK\001\002" @@ -9221,27 +9869,35 @@ module.exports = function () { return data; }, - toString : function() { - return '{\n' + - '\t"made" : ' + _verMade + ",\n" + - '\t"version" : ' + _version + ",\n" + - '\t"flags" : ' + _flags + ",\n" + - '\t"method" : ' + Utils.methodToString(_method) + ",\n" + - '\t"time" : ' + this.time + ",\n" + - '\t"crc" : 0x' + _crc.toString(16).toUpperCase() + ",\n" + - '\t"compressedSize" : ' + _compressedSize + " bytes,\n" + - '\t"size" : ' + _size + " bytes,\n" + - '\t"fileNameLength" : ' + _fnameLen + ",\n" + - '\t"extraLength" : ' + _extraLen + " bytes,\n" + - '\t"commentLength" : ' + _comLen + " bytes,\n" + - '\t"diskNumStart" : ' + _diskStart + ",\n" + - '\t"inAttr" : ' + _inattr + ",\n" + - '\t"attr" : ' + _attr + ",\n" + - '\t"offset" : ' + _offset + ",\n" + - '\t"entryHeaderSize" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + " bytes\n" + - '}'; + toJSON: function () { + const bytes = function (nr) { + return nr + " bytes"; + }; + + return { + made: _verMade, + version: _version, + flags: _flags, + method: Utils.methodToString(_method), + time: this.time, + crc: "0x" + _crc.toString(16).toUpperCase(), + compressedSize: bytes(_compressedSize), + size: bytes(_size), + fileNameLength: bytes(_fnameLen), + extraLength: bytes(_extraLen), + commentLength: bytes(_comLen), + diskNumStart: _diskStart, + inAttr: _inattr, + attr: _attr, + offset: _offset, + entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; @@ -9271,31 +9927,52 @@ module.exports = function () { _commentLength = 0; return { - get diskEntries () { return _volumeEntries }, - set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; }, + get diskEntries() { + return _volumeEntries; + }, + set diskEntries(/*Number*/ val) { + _volumeEntries = _totalEntries = val; + }, - get totalEntries () { return _totalEntries }, - set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; }, + get totalEntries() { + return _totalEntries; + }, + set totalEntries(/*Number*/ val) { + _totalEntries = _volumeEntries = val; + }, - get size () { return _size }, - set size (/*Number*/val) { _size = val; }, + get size() { + return _size; + }, + set size(/*Number*/ val) { + _size = val; + }, - get offset () { return _offset }, - set offset (/*Number*/val) { _offset = val; }, + get offset() { + return _offset; + }, + set offset(/*Number*/ val) 
{ + _offset = val; + }, - get commentLength () { return _commentLength }, - set commentLength (/*Number*/val) { _commentLength = val; }, + get commentLength() { + return _commentLength; + }, + set commentLength(/*Number*/ val) { + _commentLength = val; + }, - get mainHeaderSize () { + get mainHeaderSize() { return Constants.ENDHDR + _commentLength; }, - loadFromBinary : function(/*Buffer*/data) { + loadFromBinary: function (/*Buffer*/ data) { // data should be 22 bytes and start with "PK 05 06" // or be 56+ bytes and start with "PK 06 06" for Zip64 - if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && - (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) { - + if ( + (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && + (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) + ) { throw new Error(Utils.Errors.INVALID_END); } @@ -9322,11 +9999,10 @@ module.exports = function () { _commentLength = 0; } - }, - toBinary : function() { - var b = Buffer.alloc(Constants.ENDHDR + _commentLength); + toBinary: function () { + var b = Buffer.alloc(Constants.ENDHDR + _commentLength); // "PK 05 06" signature b.writeUInt32LE(Constants.ENDSIG, 0); b.writeUInt32LE(0, 4); @@ -9346,53 +10022,67 @@ module.exports = function () { return b; }, - toString : function() { - return '{\n' + - '\t"diskEntries" : ' + _volumeEntries + ",\n" + - '\t"totalEntries" : ' + _totalEntries + ",\n" + - '\t"size" : ' + _size + " bytes,\n" + - '\t"offset" : 0x' + _offset.toString(16).toUpperCase() + ",\n" + - '\t"commentLength" : 0x' + _commentLength + "\n" + - '}'; + toJSON: function () { + // creates 0x0000 style output + const offset = function (nr, len) { + let offs = nr.toString(16).toUpperCase(); + while (offs.length < len) offs = "0" + offs; + return "0x" + offs; + }; + + return { + diskEntries: _volumeEntries, + totalEntries: _totalEntries, + size: _size + " bytes", + offset: offset(_offset, 4), + commentLength: _commentLength + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; + /***/ }), /***/ 7686: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function (/*Buffer*/inbuf) { +module.exports = function (/*Buffer*/ inbuf) { + var zlib = __nccwpck_require__(9796); - var zlib = __nccwpck_require__(8761); - - var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024}; - - return { - deflate: function () { - return zlib.deflateRawSync(inbuf, opts); - }, + var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; - deflateAsync: function (/*Function*/callback) { - var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0; - tmp.on('data', function (data) { - parts.push(data); - total += data.length; - }); - tmp.on('end', function () { - var buf = Buffer.alloc(total), written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; + return { + deflate: function () { + return zlib.deflateRawSync(inbuf, opts); + }, + + deflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createDeflateRaw(opts), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += 
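The Deflater/Inflater wrappers above use Node's raw DEFLATE APIs, since zip entries store headerless DEFLATE streams. A minimal round-trip sketch:

```js
const zlib = require("zlib");

const input = Buffer.from("hello hello hello hello");
const compressed = zlib.deflateRawSync(input, {
  // chunk size rounded up to the next KiB, as in the wrapper above
  chunkSize: (Math.floor(input.length / 1024) + 1) * 1024,
});
const restored = zlib.inflateRawSync(compressed);

console.log(restored.equals(input)); // true
```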
part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); } - callback && callback(buf); - }); - tmp.end(inbuf); - } - } + }; }; @@ -9405,124 +10095,221 @@ exports.Deflater = __nccwpck_require__(7686); exports.Inflater = __nccwpck_require__(2153); exports.ZipCrypto = __nccwpck_require__(3228); + /***/ }), /***/ 2153: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = function (/*Buffer*/inbuf) { +module.exports = function (/*Buffer*/ inbuf) { + var zlib = __nccwpck_require__(9796); - var zlib = __nccwpck_require__(8761); + return { + inflate: function () { + return zlib.inflateRawSync(inbuf); + }, - return { - inflate: function () { - return zlib.inflateRawSync(inbuf); - }, - - inflateAsync: function (/*Function*/callback) { - var tmp = zlib.createInflateRaw(), parts = [], total = 0; - tmp.on('data', function (data) { - parts.push(data); - total += data.length; - }); - tmp.on('end', function () { - var buf = Buffer.alloc(total), written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; + inflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createInflateRaw(), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); } - callback && callback(buf); - }); - tmp.end(inbuf); - } - } + }; }; /***/ }), /***/ 3228: -/***/ ((module) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// node crypt, we use it for generate salt +// eslint-disable-next-line node/no-unsupported-features/node-builtins +const { randomFillSync } = __nccwpck_require__(6113); // generate CRC32 lookup table -const crctable = (new Uint32Array(256)).map((t,crc)=>{ - for(let j=0;j<8;j++){ - if (0 !== (crc & 1)){ - crc = (crc >>> 1) ^ 0xEDB88320 - }else{ - crc >>>= 1 +const crctable = new Uint32Array(256).map((t, crc) => { + for (let j = 0; j < 8; j++) { + if (0 !== (crc & 1)) { + crc = (crc >>> 1) ^ 0xedb88320; + } else { + crc >>>= 1; } } - return crc>>>0; + return crc >>> 0; }); -function make_decrypter(/*Buffer*/pwd){ - // C-style uInt32 Multiply - const uMul = (a,b) => Math.imul(a, b) >>> 0; - // Initialize keys with default values - const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); - // crc32 byte update - const crc32update = (pCrc32, bval) => { - return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); - } - // update keys with byteValues - const updateKeys = (byteValue) => { - keys[0] = crc32update(keys[0], byteValue); - keys[1] += keys[0] & 0xff; - keys[1] = uMul(keys[1], 134775813) + 1; - keys[2] = crc32update(keys[2], keys[1] >>> 24); - } +// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) +const uMul = (a, b) => Math.imul(a, b) >>> 0; - // 1. Stage initialize key - const pass = (Buffer.isBuffer(pwd)) ? 
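Sketch of the encryption-header salt generation introduced above: 12 random bytes from crypto.randomFillSync, with a Math.random fallback when that function is unavailable.

```js
const { randomFillSync } = require("crypto");

function genSalt() {
  if (typeof randomFillSync === "function") {
    return randomFillSync(Buffer.alloc(12)); // preferred: CSPRNG bytes
  }
  const salt = Buffer.alloc(12);             // fallback, as in the diff
  for (let i = 0; i < salt.length; i++) salt[i] = (Math.random() * 256) & 0xff;
  return salt;
}

console.log(genSalt().toString("hex")); // 24 hex chars
```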
pwd : Buffer.from(pwd); - for(let i=0; i< pass.length; i++){ - updateKeys(pass[i]); +// crc32 byte single update (actually same function is part of utils.crc32 function :) ) +const crc32update = (pCrc32, bval) => { + return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +}; + +// function for generating salt for encrytion header +const genSalt = () => { + if ("function" === typeof randomFillSync) { + return randomFillSync(Buffer.alloc(12)); + } else { + // fallback if function is not defined + return genSalt.node(); } +}; + +// salt generation with node random function (mainly as fallback) +genSalt.node = () => { + const salt = Buffer.alloc(12); + const len = salt.length; + for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; + return salt; +}; + +// general config +const config = { + genSalt +}; + +// Class Initkeys handles same basic ops with keys +function Initkeys(pw) { + const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw); + this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); + for (let i = 0; i < pass.length; i++) { + this.updateKeys(pass[i]); + } +} + +Initkeys.prototype.updateKeys = function (byteValue) { + const keys = this.keys; + keys[0] = crc32update(keys[0], byteValue); + keys[1] += keys[0] & 0xff; + keys[1] = uMul(keys[1], 134775813) + 1; + keys[2] = crc32update(keys[2], keys[1] >>> 24); + return byteValue; +}; + +Initkeys.prototype.next = function () { + const k = (this.keys[2] | 2) >>> 0; // key + return (uMul(k, k ^ 1) >> 8) & 0xff; // decode +}; + +function make_decrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); // return decrypter function - return function (/*Buffer*/data){ - if (!Buffer.isBuffer(data)){ - throw 'decrypter needs Buffer' - } + return function (/*Buffer*/ data) { // result - we create new Buffer for results const result = Buffer.alloc(data.length); let pos = 0; // process input data - for(let c of data){ - const k = (keys[2] | 2) >>> 0; // key - c ^= (uMul(k, k^1) >> 8) & 0xff; // decode - result[pos++] = c; // Save Value - updateKeys(c); // update keys with decoded byte + for (let c of data) { + //c ^= keys.next(); + //result[pos++] = c; // decode & Save Value + result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte } return result; - } + }; } -function decrypt(/*Buffer*/ data, /*Object*/header, /*String, Buffer*/ pwd){ +function make_encrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); + + // return encrypting function, result and pos is here so we dont have to merge buffers later + return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { + // result - we create new Buffer for results + if (!result) result = Buffer.alloc(data.length); + // process input data + for (let c of data) { + const k = keys.next(); // save key byte + result[pos++] = c ^ k; // save val + keys.updateKeys(c); // update keys with decoded byte + } + return result; + }; +} + +function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { if (!data || !Buffer.isBuffer(data) || data.length < 12) { return Buffer.alloc(0); } - - // We Initialize and generate decrypting function + + // 1. We Initialize and generate decrypting function const decrypter = make_decrypter(pwd); - // check - for testing password - const check = header.crc >>> 24; - // decrypt salt what is always 12 bytes and is a part of file content - const testbyte = decrypter(data.slice(0, 12))[11]; + // 2. 
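A self-contained sketch of the legacy ZipCrypto key schedule that the Initkeys class above implements: three 32-bit keys seeded from the password, each password byte fed through a CRC-32 step, a multiply-add, and another CRC-32 step (makeKeys is an illustrative name).

```js
const crctable = new Uint32Array(256).map((t, crc) => {
  for (let j = 0; j < 8; j++) crc = crc & 1 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
  return crc >>> 0;
});
const uMul = (a, b) => Math.imul(a, b) >>> 0; // C-style uint32 multiply
const crc32update = (crc, b) => crctable[(crc ^ b) & 0xff] ^ (crc >>> 8);

function makeKeys(password) {
  const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
  for (const byte of Buffer.from(password)) {
    keys[0] = crc32update(keys[0], byte);
    keys[1] = uMul(keys[1] + (keys[0] & 0xff), 134775813) + 1;
    keys[2] = crc32update(keys[2], keys[1] >>> 24);
  }
  return keys;
}

// Three derived 32-bit keys, printed as hex.
console.log([...makeKeys("secret")].map((k) => k.toString(16)));
```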
decrypt salt what is always 12 bytes and is a part of file content + const salt = decrypter(data.slice(0, 12)); - // does password meet expectations - if (check !== testbyte){ - throw 'ADM-ZIP: Wrong Password'; + // 3. does password meet expectations + if (salt[11] !== header.crc >>> 24) { + throw "ADM-ZIP: Wrong Password"; } - // decode content + // 4. decode content return decrypter(data.slice(12)); } -module.exports = {decrypt}; +// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality +function _salter(data) { + if (Buffer.isBuffer(data) && data.length >= 12) { + // be aware - currently salting buffer data is modified + config.genSalt = function () { + return data.slice(0, 12); + }; + } else if (data === "node") { + // test salt generation with node random function + config.genSalt = genSalt.node; + } else { + // if value is not acceptable config gets reset. + config.genSalt = genSalt; + } +} + +function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { + // 1. test data if data is not Buffer we make buffer from it + if (data == null) data = Buffer.alloc(0); + // if data is not buffer be make buffer from it + if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); + + // 2. We Initialize and generate encrypting function + const encrypter = make_encrypter(pwd); + + // 3. generate salt (12-bytes of random data) + const salt = config.genSalt(); + salt[11] = (header.crc >>> 24) & 0xff; + + // old implementations (before PKZip 2.04g) used two byte check + if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; + + // 4. create output + const result = Buffer.alloc(data.length + 12); + encrypter(salt, result); + + // finally encode content + return encrypter(data, result, 12); +} + +module.exports = { decrypt, encrypt, _salter }; /***/ }), @@ -9607,32 +10394,39 @@ module.exports = { REDUCED3 : 4, // reduced with compression factor 3 REDUCED4 : 5, // reduced with compression factor 4 IMPLODED : 6, // imploded - // 7 reserved + // 7 reserved for Tokenizing compression algorithm DEFLATED : 8, // deflated ENHANCED_DEFLATED: 9, // enhanced deflated PKWARE : 10,// PKWare DCL imploded - // 11 reserved + // 11 reserved by PKWARE BZIP2 : 12, // compressed using BZIP2 - // 13 reserved + // 13 reserved by PKWARE LZMA : 14, // LZMA - // 15-17 reserved + // 15-17 reserved by PKWARE IBM_TERSE : 18, // compressed using IBM TERSE - IBM_LZ77 : 19, //IBM LZ77 z + IBM_LZ77 : 19, // IBM LZ77 z + AES_ENCRYPT : 99, // WinZIP AES encryption method /* General purpose bit flag */ - FLG_ENC : 0, // encripted file - FLG_COMP1 : 1, // compression option - FLG_COMP2 : 2, // compression option - FLG_DESC : 4, // data descriptor - FLG_ENH : 8, // enhanced deflation - FLG_STR : 16, // strong encryption - FLG_LNG : 1024, // language encoding + // values can obtained with expression 2**bitnr + FLG_ENC : 1, // Bit 0: encrypted file + FLG_COMP1 : 2, // Bit 1, compression option + FLG_COMP2 : 4, // Bit 2, compression option + FLG_DESC : 8, // Bit 3, data descriptor + FLG_ENH : 16, // Bit 4, enhanced deflating + FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. + FLG_STR : 64, // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. + FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. 
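The general-purpose bit flags above are now expressed as plain powers of two (bit n equals 2**n). An illustrative check against a sample flags value:

```js
const FLG_ENC = 1;     // bit 0: entry is encrypted
const FLG_DESC = 8;    // bit 3: CRC/sizes follow in a data descriptor
const FLG_EFS = 2048;  // bit 11: file name and comment are UTF-8

const flags = 0x0808;  // sample value: data descriptor + UTF-8 names
console.log({
  encrypted: (flags & FLG_ENC) !== 0, // false
  streamed: (flags & FLG_DESC) !== 0, // true
  utf8: (flags & FLG_EFS) !== 0,      // true
});
```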
FLG_MSK : 4096, // mask header values /* Load type */ - FILE : 0, + FILE : 2, BUFFER : 1, - NONE : 2, + NONE : 0, /* 4.5 Extensible data fields */ EF_ID : 0, @@ -9674,66 +10468,65 @@ module.exports = { module.exports = { /* Header error messages */ - "INVALID_LOC" : "Invalid LOC header (bad signature)", - "INVALID_CEN" : "Invalid CEN header (bad signature)", - "INVALID_END" : "Invalid END header (bad signature)", + INVALID_LOC: "Invalid LOC header (bad signature)", + INVALID_CEN: "Invalid CEN header (bad signature)", + INVALID_END: "Invalid END header (bad signature)", /* ZipEntry error messages*/ - "NO_DATA" : "Nothing to decompress", - "BAD_CRC" : "CRC32 checksum failed", - "FILE_IN_THE_WAY" : "There is a file in the way: %s", - "UNKNOWN_METHOD" : "Invalid/unsupported compression method", + NO_DATA: "Nothing to decompress", + BAD_CRC: "CRC32 checksum failed", + FILE_IN_THE_WAY: "There is a file in the way: %s", + UNKNOWN_METHOD: "Invalid/unsupported compression method", /* Inflater error messages */ - "AVAIL_DATA" : "inflate::Available inflate data did not terminate", - "INVALID_DISTANCE" : "inflate::Invalid literal/length or distance code in fixed or dynamic block", - "TO_MANY_CODES" : "inflate::Dynamic block code description: too many length or distance codes", - "INVALID_REPEAT_LEN" : "inflate::Dynamic block code description: repeat more than specified lengths", - "INVALID_REPEAT_FIRST" : "inflate::Dynamic block code description: repeat lengths with no first length", - "INCOMPLETE_CODES" : "inflate::Dynamic block code description: code lengths codes incomplete", - "INVALID_DYN_DISTANCE": "inflate::Dynamic block code description: invalid distance code lengths", - "INVALID_CODES_LEN": "inflate::Dynamic block code description: invalid literal/length code lengths", - "INVALID_STORE_BLOCK" : "inflate::Stored block length did not match one's complement", - "INVALID_BLOCK_TYPE" : "inflate::Invalid block type (type == 3)", + AVAIL_DATA: "inflate::Available inflate data did not terminate", + INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", + TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", + INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", + INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", + INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", + INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", + INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", + INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", + INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", /* ADM-ZIP error messages */ - "CANT_EXTRACT_FILE" : "Could not extract the file", - "CANT_OVERRIDE" : "Target file already exists", - "NO_ZIP" : "No zip file was loaded", - "NO_ENTRY" : "Entry doesn't exist", - "DIRECTORY_CONTENT_ERROR" : "A directory cannot have content", - "FILE_NOT_FOUND" : "File not found: %s", - "NOT_IMPLEMENTED" : "Not implemented", - "INVALID_FILENAME" : "Invalid filename", - "INVALID_FORMAT" : "Invalid or unsupported zip format. 
No END header found" + CANT_EXTRACT_FILE: "Could not extract the file", + CANT_OVERRIDE: "Target file already exists", + NO_ZIP: "No zip file was loaded", + NO_ENTRY: "Entry doesn't exist", + DIRECTORY_CONTENT_ERROR: "A directory cannot have content", + FILE_NOT_FOUND: "File not found: %s", + NOT_IMPLEMENTED: "Not implemented", + INVALID_FILENAME: "Invalid filename", + INVALID_FORMAT: "Invalid or unsupported zip format. No END header found" }; + /***/ }), /***/ 8321: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var fs = __nccwpck_require__(2895).require(), - pth = __nccwpck_require__(5622); - +const fs = (__nccwpck_require__(2895).require)(); +const pth = __nccwpck_require__(1017); + fs.existsSync = fs.existsSync || pth.existsSync; -module.exports = function(/*String*/path) { - +module.exports = function (/*String*/ path) { var _path = path || "", - _permissions = 0, _obj = newAttr(), _stat = null; function newAttr() { return { - directory : false, - readonly : false, - hidden : false, - executable : false, - mtime : 0, - atime : 0 - } + directory: false, + readonly: false, + hidden: false, + executable: false, + mtime: 0, + atime: 0 + }; } if (_path && fs.existsSync(_path)) { @@ -9741,61 +10534,58 @@ module.exports = function(/*String*/path) { _obj.directory = _stat.isDirectory(); _obj.mtime = _stat.mtime; _obj.atime = _stat.atime; - _obj.executable = (0o111 & _stat.mode) != 0; // file is executable who ever har right not just owner - _obj.readonly = (0o200 & _stat.mode) == 0; // readonly if owner has no write right + _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner + _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right _obj.hidden = pth.basename(_path)[0] === "."; } else { - console.warn("Invalid path: " + _path) + console.warn("Invalid path: " + _path); } return { - - get directory () { + get directory() { return _obj.directory; }, - get readOnly () { + get readOnly() { return _obj.readonly; }, - get hidden () { + get hidden() { return _obj.hidden; }, - get mtime () { + get mtime() { return _obj.mtime; }, - get atime () { - return _obj.atime; + get atime() { + return _obj.atime; }, - - get executable () { + get executable() { return _obj.executable; }, - decodeAttributes : function(val) { + decodeAttributes: function () {}, + encodeAttributes: function () {}, + + toJSON: function () { + return { + path: _path, + isDirectory: _obj.directory, + isReadOnly: _obj.readonly, + isHidden: _obj.hidden, + isExecutable: _obj.executable, + mTime: _obj.mtime, + aTime: _obj.atime + }; }, - encodeAttributes : function (val) { - - }, - - toString : function() { - return '{\n' + - '\t"path" : "' + _path + ",\n" + - '\t"isDirectory" : ' + _obj.directory + ",\n" + - '\t"isReadOnly" : ' + _obj.readonly + ",\n" + - '\t"isHidden" : ' + _obj.hidden + ",\n" + - '\t"isExecutable" : ' + _obj.executable + ",\n" + - '\t"mTime" : ' + _obj.mtime + "\n" + - '\t"aTime" : ' + _obj.atime + "\n" + - '}'; + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } - + }; }; @@ -9804,17 +10594,16 @@ module.exports = function(/*String*/path) { /***/ 2895: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.require = function() { - var fs = __nccwpck_require__(5747); - if (process && process.versions && process.versions['electron']) { - try { - originalFs = __nccwpck_require__(2941); - if (Object.keys(originalFs).length > 0) { - fs = originalFs; - } - } catch (e) {} - } 
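Sketch of the mode-bit checks the FileAttr helper above derives from fs.Stats: any execute bit marks the entry executable, a missing owner-write bit marks it read-only, and a leading dot marks it hidden (describe is an illustrative name).

```js
const fs = require("fs");
const path = require("path");

function describe(p) {
  const stat = fs.statSync(p);
  return {
    directory: stat.isDirectory(),
    executable: (stat.mode & 0o111) !== 0, // executable by anyone
    readonly: (stat.mode & 0o200) === 0,   // owner has no write permission
    hidden: path.basename(p).startsWith("."),
  };
}

console.log(describe("."));
```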
- return fs +exports.require = function () { + if (typeof process === "object" && process.versions && process.versions["electron"]) { + try { + const originalFs = __nccwpck_require__(2941); + if (Object.keys(originalFs).length > 0) { + return originalFs; + } + } catch (e) {} + } + return __nccwpck_require__(7147); }; @@ -9824,234 +10613,262 @@ exports.require = function() { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(1291); -module.exports.FileSystem = __nccwpck_require__(2895); module.exports.Constants = __nccwpck_require__(4522); module.exports.Errors = __nccwpck_require__(1255); module.exports.FileAttr = __nccwpck_require__(8321); + /***/ }), /***/ 1291: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var fs = __nccwpck_require__(2895).require(), - pth = __nccwpck_require__(5622); +const fsystem = (__nccwpck_require__(2895).require)(); +const pth = __nccwpck_require__(1017); +const Constants = __nccwpck_require__(4522); +const isWin = typeof process === "object" && "win32" === process.platform; -fs.existsSync = fs.existsSync || pth.existsSync; +const is_Obj = (obj) => obj && typeof obj === "object"; -module.exports = (function() { +// generate CRC32 lookup table +const crcTable = new Uint32Array(256).map((t, c) => { + for (let k = 0; k < 8; k++) { + if ((c & 1) !== 0) { + c = 0xedb88320 ^ (c >>> 1); + } else { + c >>>= 1; + } + } + return c >>> 0; +}); - var crcTable = [], - Constants = __nccwpck_require__(4522), - Errors = __nccwpck_require__(1255), +// UTILS functions - PATH_SEPARATOR = pth.sep; +function Utils(opts) { + this.sep = pth.sep; + this.fs = fsystem; + if (is_Obj(opts)) { + // custom filesystem + if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { + this.fs = opts.fs; + } + } +} - function mkdirSync(/*String*/path) { - var resolvedPath = path.split(PATH_SEPARATOR)[0]; - path.split(PATH_SEPARATOR).forEach(function(name) { - if (!name || name.substr(-1,1) === ":") return; - resolvedPath += PATH_SEPARATOR + name; +module.exports = Utils; + +// INSTANCED functions + +Utils.prototype.makeDir = function (/*String*/ folder) { + const self = this; + + // Sync - make directories tree + function mkdirSync(/*String*/ fpath) { + let resolvedPath = fpath.split(self.sep)[0]; + fpath.split(self.sep).forEach(function (name) { + if (!name || name.substr(-1, 1) === ":") return; + resolvedPath += self.sep + name; var stat; try { - stat = fs.statSync(resolvedPath); + stat = self.fs.statSync(resolvedPath); } catch (e) { - fs.mkdirSync(resolvedPath); + self.fs.mkdirSync(resolvedPath); } - if (stat && stat.isFile()) - throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); + if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); }); } - function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) { - if (typeof pattern === 'boolean') { - recoursive = pattern; - pattern = undefined; + mkdirSync(folder); +}; + +Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { + const self = this; + if (self.fs.existsSync(path)) { + if (!overwrite) return false; // cannot overwrite + + var stat = self.fs.statSync(path); + if (stat.isDirectory()) { + return false; } - var files = []; - fs.readdirSync(dir).forEach(function(file) { - var path = pth.join(dir, file); + } + var folder = pth.dirname(path); + if (!self.fs.existsSync(folder)) { + self.makeDir(folder); + } - if (fs.statSync(path).isDirectory() && 
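Sketch of the filesystem picker above: under Electron it prefers original-fs (Electron's fs module without the ASAR patches, presumably so archive files inside .asar bundles are read as plain files), otherwise it falls back to Node's fs.

```js
function pickFs() {
  if (typeof process === "object" && process.versions && process.versions.electron) {
    try {
      const originalFs = require("original-fs"); // only resolvable inside Electron
      if (Object.keys(originalFs).length > 0) return originalFs;
    } catch (e) {
      // not running under Electron's module system; fall through
    }
  }
  return require("fs");
}

const fs = pickFs();
console.log(typeof fs.statSync); // "function"
```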
recoursive) - files = files.concat(findSync(path, pattern, recoursive)); + var fd; + try { + fd = self.fs.openSync(path, "w", 438); // 0666 + } catch (e) { + self.fs.chmodSync(path, 438); + fd = self.fs.openSync(path, "w", 438); + } + if (fd) { + try { + self.fs.writeSync(fd, content, 0, content.length, 0); + } finally { + self.fs.closeSync(fd); + } + } + self.fs.chmodSync(path, attr || 438); + return true; +}; - if (!pattern || pattern.test(path)) { - files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : "")); +Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { + if (typeof attr === "function") { + callback = attr; + attr = undefined; + } + + const self = this; + + self.fs.exists(path, function (exist) { + if (exist && !overwrite) return callback(false); + + self.fs.stat(path, function (err, stat) { + if (exist && stat.isDirectory()) { + return callback(false); } + var folder = pth.dirname(path); + self.fs.exists(folder, function (exists) { + if (!exists) self.makeDir(folder); + + self.fs.open(path, "w", 438, function (err, fd) { + if (err) { + self.fs.chmod(path, 438, function () { + self.fs.open(path, "w", 438, function (err, fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + }); + }); + } else if (fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + }); + }); + } else { + self.fs.chmod(path, attr || 438, function () { + callback(true); + }); + } + }); + }); + }); + }); +}; + +Utils.prototype.findFiles = function (/*String*/ path) { + const self = this; + + function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { + if (typeof pattern === "boolean") { + recursive = pattern; + pattern = undefined; + } + let files = []; + self.fs.readdirSync(dir).forEach(function (file) { + var path = pth.join(dir, file); + + if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); + + if (!pattern || pattern.test(path)) { + files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? 
self.sep : "")); + } }); return files; } - function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) { - var slice = Buffer.from(buffer.slice(index, index + 8)); - slice.swap64(); + return findSync(path, undefined, true); +}; - return parseInt(`0x${ slice.toString('hex') }`); +Utils.prototype.getAttributes = function () {}; + +Utils.prototype.setAttributes = function () {}; + +// STATIC functions + +// crc32 single update (it is part of crc32) +Utils.crc32update = function (crc, byte) { + return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); +}; + +Utils.crc32 = function (buf) { + if (typeof buf === "string") { + buf = Buffer.from(buf, "utf8"); } + // Generate crcTable + if (!crcTable.length) genCRCTable(); - return { - makeDir : function(/*String*/path) { - mkdirSync(path); - }, + let len = buf.length; + let crc = ~0; + for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); + // xor and cast as uint32 number + return ~crc >>> 0; +}; - crc32 : function(buf) { - if (typeof buf === 'string') { - buf = Buffer.alloc(buf.length, buf); - } - var b = Buffer.alloc(4); - if (!crcTable.length) { - for (var n = 0; n < 256; n++) { - var c = n; - for (var k = 8; --k >= 0;) // - if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; } - if (c < 0) { - b.writeInt32LE(c, 0); - c = b.readUInt32LE(0); - } - crcTable[n] = c; - } - } - var crc = 0, off = 0, len = buf.length, c1 = ~crc; - while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8); - crc = ~c1; - b.writeInt32LE(crc & 0xffffffff, 0); - return b.readUInt32LE(0); - }, - - methodToString : function(/*Number*/method) { - switch (method) { - case Constants.STORED: - return 'STORED (' + method + ')'; - case Constants.DEFLATED: - return 'DEFLATED (' + method + ')'; - default: - return 'UNSUPPORTED (' + method + ')'; - } - - }, - - writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) { - if (fs.existsSync(path)) { - if (!overwrite) - return false; // cannot overwrite - - var stat = fs.statSync(path); - if (stat.isDirectory()) { - return false; - } - } - var folder = pth.dirname(path); - if (!fs.existsSync(folder)) { - mkdirSync(folder); - } - - var fd; - try { - fd = fs.openSync(path, 'w', 438); // 0666 - } catch(e) { - fs.chmodSync(path, 438); - fd = fs.openSync(path, 'w', 438); - } - if (fd) { - try { - fs.writeSync(fd, content, 0, content.length, 0); - } - catch (e){ - throw e; - } - finally { - fs.closeSync(fd); - } - } - fs.chmodSync(path, attr || 438); - return true; - }, - - writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) { - if(typeof attr === 'function') { - callback = attr; - attr = undefined; - } - - fs.exists(path, function(exists) { - if(exists && !overwrite) - return callback(false); - - fs.stat(path, function(err, stat) { - if(exists &&stat.isDirectory()) { - return callback(false); - } - - var folder = pth.dirname(path); - fs.exists(folder, function(exists) { - if(!exists) - mkdirSync(folder); - - fs.open(path, 'w', 438, function(err, fd) { - if(err) { - fs.chmod(path, 438, function() { - fs.open(path, 'w', 438, function(err, fd) { - fs.write(fd, content, 0, content.length, 0, function() { - fs.close(fd, function() { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - }); - }); - }); - }) - } else { - if(fd) { - fs.write(fd, content, 0, content.length, 0, function() { - fs.close(fd, function() { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - 
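A standalone sketch of the table-driven CRC-32 that Utils.crc32 above implements; the printed value is the standard CRC-32 check value for "123456789".

```js
const crcTable = new Uint32Array(256).map((t, c) => {
  for (let k = 0; k < 8; k++) c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1;
  return c >>> 0;
});

function crc32(input) {
  const buf = typeof input === "string" ? Buffer.from(input, "utf8") : input;
  let crc = ~0;                                            // start with all bits set
  for (const byte of buf) crc = crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
  return ~crc >>> 0;                                       // final inversion, cast to uint32
}

console.log(crc32("123456789").toString(16)); // "cbf43926"
```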
}); - }); - } else { - fs.chmod(path, attr || 438, function() { - callback(true); - }) - } - } - }); - }) - }) - }) - }, - - findFiles : function(/*String*/path) { - return findSync(path, true); - }, - - getAttributes : function(/*String*/path) { - - }, - - setAttributes : function(/*String*/path) { - - }, - - toBuffer : function(input) { - if (Buffer.isBuffer(input)) { - return input; - } else { - if (input.length === 0) { - return Buffer.alloc(0) - } - return Buffer.from(input, 'utf8'); - } - }, - - readBigUInt64LE, - - Constants : Constants, - Errors : Errors +Utils.methodToString = function (/*Number*/ method) { + switch (method) { + case Constants.STORED: + return "STORED (" + method + ")"; + case Constants.DEFLATED: + return "DEFLATED (" + method + ")"; + default: + return "UNSUPPORTED (" + method + ")"; } -})(); +}; + +// removes ".." style path elements +Utils.canonical = function (/*string*/ path) { + if (!path) return ""; + // trick normalize think path is absolute + var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.join(".", safeSuffix); +}; + +// make abolute paths taking prefix as root folder +Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { + prefix = pth.resolve(pth.normalize(prefix)); + var parts = name.split("/"); + for (var i = 0, l = parts.length; i < l; i++) { + var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); + if (path.indexOf(prefix) === 0) { + return path; + } + } + return pth.normalize(pth.join(prefix, pth.basename(name))); +}; + +// converts buffer, Uint8Array, string types to buffer +Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { + if (Buffer.isBuffer(input)) { + return input; + } else if (input instanceof Uint8Array) { + return Buffer.from(input); + } else { + // expect string all other values are invalid and return empty buffer + return typeof input === "string" ? 
Buffer.from(input, "utf8") : Buffer.alloc(0); + } +}; + +Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { + var slice = Buffer.from(buffer.slice(index, index + 8)); + slice.swap64(); + + return parseInt(`0x${slice.toString("hex")}`); +}; + +Utils.isWin = isWin; // Do we have windows system +Utils.crcTable = crcTable; /***/ }), @@ -10064,8 +10881,7 @@ var Utils = __nccwpck_require__(5182), Constants = Utils.Constants, Methods = __nccwpck_require__(3928); -module.exports = function (/*Buffer*/input) { - +module.exports = function (/*Buffer*/ input) { var _entryHeader = new Headers.EntryHeader(), _entryName = Buffer.alloc(0), _comment = Buffer.alloc(0), @@ -10078,15 +10894,15 @@ module.exports = function (/*Buffer*/input) { return Buffer.alloc(0); } _entryHeader.loadDataHeaderFromBinary(input); - return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize) + return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); } function crc32OK(data) { // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written if ((_entryHeader.flags & 0x8) !== 0x8) { - if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { - return false; - } + if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { + return false; + } } else { // @TODO: load and check data descriptor header // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure @@ -10095,10 +10911,10 @@ module.exports = function (/*Buffer*/input) { return true; } - function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) { - if(typeof callback === 'undefined' && typeof async === 'string') { - pass=async; - async=void 0; + function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { + if (typeof callback === "undefined" && typeof async === "string") { + pass = async; + async = void 0; } if (_isDirectory) { if (async && callback) { @@ -10115,9 +10931,9 @@ module.exports = function (/*Buffer*/input) { return compressedData; } - if (_entryHeader.encripted){ - if ('string' !== typeof pass && !Buffer.isBuffer(pass)){ - throw new Error('ADM-ZIP: Incompatible password parameter'); + if (_entryHeader.encripted) { + if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { + throw new Error("ADM-ZIP: Incompatible password parameter"); } compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); } @@ -10128,30 +10944,33 @@ module.exports = function (/*Buffer*/input) { case Utils.Constants.STORED: compressedData.copy(data); if (!crc32OK(data)) { - if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error + if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error throw new Error(Utils.Errors.BAD_CRC); - } else {//si added otherwise did not seem to return data. + } else { + //si added otherwise did not seem to return data. 
if (async && callback) callback(data); return data; } case Utils.Constants.DEFLATED: var inflater = new Methods.Inflater(compressedData); if (!async) { - var result = inflater.inflate(data); + const result = inflater.inflate(data); result.copy(data, 0); if (!crc32OK(data)) { throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); } return data; } else { - inflater.inflateAsync(function(result) { - result.copy(data, 0); - if (!crc32OK(data)) { - if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error - } else { //si added otherwise did not seem to return data. - if (callback) callback(data); + inflater.inflateAsync(function (result) { + result.copy(result, 0); + if (callback) { + if (!crc32OK(result)) { + callback(result, Utils.Errors.BAD_CRC); //si added error + } else { + callback(result); + } } - }) + }); } break; default: @@ -10160,7 +10979,7 @@ module.exports = function (/*Buffer*/input) { } } - function compress(/*Boolean*/async, /*Function*/callback) { + function compress(/*Boolean*/ async, /*Function*/ callback) { if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { // no data set or the data wasn't changed to require recompression if (async && callback) callback(getCompressedDataFromZip()); @@ -10181,29 +11000,26 @@ module.exports = function (/*Buffer*/input) { return compressedData; default: case Utils.Constants.DEFLATED: - var deflater = new Methods.Deflater(uncompressedData); if (!async) { var deflated = deflater.deflate(); _entryHeader.compressedSize = deflated.length; return deflated; } else { - deflater.deflateAsync(function(data) { + deflater.deflateAsync(function (data) { compressedData = Buffer.alloc(data.length); _entryHeader.compressedSize = data.length; data.copy(compressedData); callback && callback(compressedData); - }) + }); } deflater = null; break; } + } else if (async && callback) { + callback(Buffer.alloc(0)); } else { - if (async && callback) { - callback(Buffer.alloc(0)); - } else { - return Buffer.alloc(0); - } + return Buffer.alloc(0); } } @@ -10214,14 +11030,14 @@ module.exports = function (/*Buffer*/input) { function parseExtra(data) { var offset = 0; var signature, size, part; - while(offset= Constants.EF_ZIP64_SCOMP) { + if (data.length >= Constants.EF_ZIP64_SCOMP) { size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); - if(_entryHeader.size === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { _entryHeader.size = size; } } - if(data.length >= Constants.EF_ZIP64_RHO) { + if (data.length >= Constants.EF_ZIP64_RHO) { compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); - if(_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { _entryHeader.compressedSize = compressedSize; } } - if(data.length >= Constants.EF_ZIP64_DSN) { + if (data.length >= Constants.EF_ZIP64_DSN) { offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); - if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) { + if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { _entryHeader.offset = offset; } } - if(data.length >= Constants.EF_ZIP64_DSN+4) { + if (data.length >= Constants.EF_ZIP64_DSN + 4) { diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); - if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { _entryHeader.diskNumStart = diskNumStart; } } } - return { - get entryName () { return _entryName.toString(); }, - get rawEntryName() { return 
_entryName; }, - set entryName (val) { + get entryName() { + return _entryName.toString(); + }, + get rawEntryName() { + return _entryName; + }, + set entryName(val) { _entryName = Utils.toBuffer(val); var lastChar = _entryName[_entryName.length - 1]; - _isDirectory = (lastChar === 47) || (lastChar === 92); + _isDirectory = lastChar === 47 || lastChar === 92; _entryHeader.fileNameLength = _entryName.length; }, - get extra () { return _extra; }, - set extra (val) { + get extra() { + return _extra; + }, + set extra(val) { _extra = val; _entryHeader.extraLength = val.length; parseExtra(val); }, - get comment () { return _comment.toString(); }, - set comment (val) { + get comment() { + return _comment.toString(); + }, + set comment(val) { _comment = Utils.toBuffer(val); _entryHeader.commentLength = _comment.length; }, - get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); }, - get isDirectory () { return _isDirectory }, - - getCompressedData : function() { - return compress(false, null) + get name() { + var n = _entryName.toString(); + return _isDirectory + ? n + .substr(n.length - 1) + .split("/") + .pop() + : n.split("/").pop(); + }, + get isDirectory() { + return _isDirectory; }, - getCompressedDataAsync : function(/*Function*/callback) { - compress(true, callback) + getCompressedData: function () { + return compress(false, null); }, - setData : function(value) { + getCompressedDataAsync: function (/*Function*/ callback) { + compress(true, callback); + }, + + setData: function (value) { uncompressedData = Utils.toBuffer(value); if (!_isDirectory && uncompressedData.length) { _entryHeader.size = uncompressedData.length; _entryHeader.method = Utils.Constants.DEFLATED; _entryHeader.crc = Utils.crc32(value); _entryHeader.changed = true; - } else { // folders and blank files should be stored + } else { + // folders and blank files should be stored _entryHeader.method = Utils.Constants.STORED; } }, - getData : function(pass) { + getData: function (pass) { if (_entryHeader.changed) { - return uncompressedData; - } else { - return decompress(false, null, pass); + return uncompressedData; + } else { + return decompress(false, null, pass); } }, - getDataAsync : function(/*Function*/callback, pass) { - if (_entryHeader.changed) { - callback(uncompressedData) - } else { - decompress(true, callback, pass) + getDataAsync: function (/*Function*/ callback, pass) { + if (_entryHeader.changed) { + callback(uncompressedData); + } else { + decompress(true, callback, pass); } }, - set attr(attr) { _entryHeader.attr = attr; }, - get attr() { return _entryHeader.attr; }, + set attr(attr) { + _entryHeader.attr = attr; + }, + get attr() { + return _entryHeader.attr; + }, - set header(/*Buffer*/data) { + set header(/*Buffer*/ data) { _entryHeader.loadFromBinary(data); }, @@ -10331,31 +11169,45 @@ module.exports = function (/*Buffer*/input) { return _entryHeader; }, - packHeader : function() { + packHeader: function () { + // 1. create header (buffer) var header = _entryHeader.entryHeaderToBinary(); - // add - _entryName.copy(header, Utils.Constants.CENHDR); + var addpos = Utils.Constants.CENHDR; + // 2. add file name + _entryName.copy(header, addpos); + addpos += _entryName.length; + // 3. add extra data if (_entryHeader.extraLength) { - _extra.copy(header, Utils.Constants.CENHDR + _entryName.length) + _extra.copy(header, addpos); + addpos += _entryHeader.extraLength; } + // 4. 
add file comment if (_entryHeader.commentLength) { - _comment.copy(header, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length); + _comment.copy(header, addpos); } return header; }, - toString : function() { - return '{\n' + - '\t"entryName" : "' + _entryName.toString() + "\",\n" + - '\t"name" : "' + (_isDirectory ? _entryName.toString().replace(/\/$/, '').split("/").pop() : _entryName.toString().split("/").pop()) + "\",\n" + - '\t"comment" : "' + _comment.toString() + "\",\n" + - '\t"isDirectory" : ' + _isDirectory + ",\n" + - '\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t").replace(/}/mg, "\t}") + ",\n" + - '\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" + - '\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" + - '}'; + toJSON: function () { + const bytes = function (nr) { + return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; + }; + + return { + entryName: this.entryName, + name: this.name, + comment: this.comment, + isDirectory: this.isDirectory, + header: _entryHeader.toJSON(), + compressedData: bytes(input), + data: bytes(uncompressedData) + }; + }, + + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); } - } + }; }; @@ -10364,413 +11216,389 @@ module.exports = function (/*Buffer*/input) { /***/ 7744: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var ZipEntry = __nccwpck_require__(4057), - Headers = __nccwpck_require__(4958), - Utils = __nccwpck_require__(5182); +const ZipEntry = __nccwpck_require__(4057); +const Headers = __nccwpck_require__(4958); +const Utils = __nccwpck_require__(5182); -module.exports = function (/*String|Buffer*/input, /*Number*/inputType) { - var entryList = [], - entryTable = {}, - _comment = Buffer.alloc(0), - filename = "", - fs = Utils.FileSystem.require(), - inBuffer = null, - mainHeader = new Headers.MainHeader(), - loadedEntries = false; +module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { + var entryList = [], + entryTable = {}, + _comment = Buffer.alloc(0), + mainHeader = new Headers.MainHeader(), + loadedEntries = false; - if (inputType === Utils.Constants.FILE) { - // is a filename - filename = input; - inBuffer = fs.readFileSync(filename); - readMainHeader(); - } else if (inputType === Utils.Constants.BUFFER) { - // is a memory buffer - inBuffer = input; - readMainHeader(); - } else { - // none. is a new file - loadedEntries = true; - } + // assign options + const opts = Object.assign(Object.create(null), options); - function iterateEntries(callback) { - const totalEntries = mainHeader.diskEntries; // total number of entries - let index = mainHeader.offset; // offset of first CEN header + const { noSort } = opts; - for (let i = 0; i < totalEntries; i++) { - let tmp = index; - const entry = new ZipEntry(inBuffer); + if (inBuffer) { + // is a memory buffer + readMainHeader(opts.readEntries); + } else { + // none. 
is a new file + loadedEntries = true; + } - entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR); - entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength); + function iterateEntries(callback) { + const totalEntries = mainHeader.diskEntries; // total number of entries + let index = mainHeader.offset; // offset of first CEN header - index += entry.header.entryHeaderSize; + for (let i = 0; i < totalEntries; i++) { + let tmp = index; + const entry = new ZipEntry(inBuffer); - callback(entry); - } - } + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - function readEntries() { - loadedEntries = true; - entryTable = {}; - entryList = new Array(mainHeader.diskEntries); // total number of entries - var index = mainHeader.offset; // offset of first CEN header - for (var i = 0; i < entryList.length; i++) { + index += entry.header.entryHeaderSize; - var tmp = index, - entry = new ZipEntry(inBuffer); - entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR); + callback(entry); + } + } - entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength); + function readEntries() { + loadedEntries = true; + entryTable = {}; + entryList = new Array(mainHeader.diskEntries); // total number of entries + var index = mainHeader.offset; // offset of first CEN header + for (var i = 0; i < entryList.length; i++) { + var tmp = index, + entry = new ZipEntry(inBuffer); + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - if (entry.header.extraLength) { - entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength); - } + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - if (entry.header.commentLength) - entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); + if (entry.header.extraLength) { + entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); + } - index += entry.header.entryHeaderSize; + if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); - entryList[i] = entry; - entryTable[entry.entryName] = entry; - } - } + index += entry.header.entryHeaderSize; - function readMainHeader() { - var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size - max = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length - n = max, - endStart = inBuffer.length, - endOffset = -1, // Start offset of the END header - commentEnd = 0; + entryList[i] = entry; + entryTable[entry.entryName] = entry; + } + } - for (i; i >= n; i--) { - if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' - if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // "PK\005\006" - endOffset = i; - commentEnd = i; - endStart = i + Utils.Constants.ENDHDR; - // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature - n = i - Utils.Constants.END64HDR; - continue; - } + function readMainHeader(/*Boolean*/ readNow) { + var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size + max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length + n = max, + endStart = inBuffer.length, + endOffset = -1, // Start offset of the END header + commentEnd = 0; - if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { - // Found a zip64 signature, let's continue reading the whole zip64 record - n = max; - continue; - } + for (i; i >= n; i--) { + if (inBuffer[i] !== 0x50) 
continue; // quick check that the byte is 'P' + if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { + // "PK\005\006" + endOffset = i; + commentEnd = i; + endStart = i + Utils.Constants.ENDHDR; + // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature + n = i - Utils.Constants.END64HDR; + continue; + } - if (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) { - // Found the zip64 record, let's determine it's size - endOffset = i; - endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; - break; - } - } + if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { + // Found a zip64 signature, let's continue reading the whole zip64 record + n = max; + continue; + } - if (!~endOffset) - throw new Error(Utils.Errors.INVALID_FORMAT); + if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { + // Found the zip64 record, let's determine it's size + endOffset = i; + endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; + break; + } + } - mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); - if (mainHeader.commentLength) { - _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); - } - // readEntries(); - } + if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); - return { - /** - * Returns an array of ZipEntry objects existent in the current opened archive - * @return Array - */ - get entries() { - if (!loadedEntries) { - readEntries(); - } - return entryList; - }, + mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); + if (mainHeader.commentLength) { + _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); + } + if (readNow) readEntries(); + } - /** - * Archive comment - * @return {String} - */ - get comment() { - return _comment.toString(); - }, - set comment(val) { - _comment = Utils.toBuffer(val); - mainHeader.commentLength = _comment.length; - }, + function sortEntries() { + if (entryList.length > 1 && !noSort) { + entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); + } + } - getEntryCount: function() { - if (!loadedEntries) { - return mainHeader.diskEntries; - } + return { + /** + * Returns an array of ZipEntry objects existent in the current opened archive + * @return Array + */ + get entries() { + if (!loadedEntries) { + readEntries(); + } + return entryList; + }, - return entryList.length; - }, + /** + * Archive comment + * @return {String} + */ + get comment() { + return _comment.toString(); + }, + set comment(val) { + _comment = Utils.toBuffer(val); + mainHeader.commentLength = _comment.length; + }, - forEach: function(callback) { - if (!loadedEntries) { - iterateEntries(callback); - return; - } + getEntryCount: function () { + if (!loadedEntries) { + return mainHeader.diskEntries; + } - entryList.forEach(callback); - }, + return entryList.length; + }, - /** - * Returns a reference to the entry with the given name or null if entry is inexistent - * - * @param entryName - * @return ZipEntry - */ - getEntry: function (/*String*/entryName) { - if (!loadedEntries) { - readEntries(); - } - return entryTable[entryName] || null; - }, + forEach: function (callback) { + if (!loadedEntries) { + iterateEntries(callback); + return; + } - /** - * Adds the given entry to the entry list - * - * @param entry - */ - setEntry: function (/*ZipEntry*/entry) { - if (!loadedEntries) { - readEntries(); - } - entryList.push(entry); - 
entryTable[entry.entryName] = entry; - mainHeader.totalEntries = entryList.length; - }, + entryList.forEach(callback); + }, - /** - * Removes the entry with the given name from the entry list. - * - * If the entry is a directory, then all nested files and directories will be removed - * @param entryName - */ - deleteEntry: function (/*String*/entryName) { - if (!loadedEntries) { - readEntries(); - } - var entry = entryTable[entryName]; - if (entry && entry.isDirectory) { - var _self = this; - this.getEntryChildren(entry).forEach(function (child) { - if (child.entryName !== entryName) { - _self.deleteEntry(child.entryName) - } - }) - } - entryList.splice(entryList.indexOf(entry), 1); - delete(entryTable[entryName]); - mainHeader.totalEntries = entryList.length; - }, + /** + * Returns a reference to the entry with the given name or null if entry is inexistent + * + * @param entryName + * @return ZipEntry + */ + getEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + return entryTable[entryName] || null; + }, - /** - * Iterates and returns all nested files and directories of the given entry - * - * @param entry - * @return Array - */ - getEntryChildren: function (/*ZipEntry*/entry) { - if (!loadedEntries) { - readEntries(); - } - if (entry.isDirectory) { - var list = [], - name = entry.entryName, - len = name.length; + /** + * Adds the given entry to the entry list + * + * @param entry + */ + setEntry: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + entryList.push(entry); + entryTable[entry.entryName] = entry; + mainHeader.totalEntries = entryList.length; + }, - entryList.forEach(function (zipEntry) { - if (zipEntry.entryName.substr(0, len) === name) { - list.push(zipEntry); - } - }); - return list; - } - return [] - }, + /** + * Removes the entry with the given name from the entry list. 
+ * + * If the entry is a directory, then all nested files and directories will be removed + * @param entryName + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + var entry = entryTable[entryName]; + if (entry && entry.isDirectory) { + var _self = this; + this.getEntryChildren(entry).forEach(function (child) { + if (child.entryName !== entryName) { + _self.deleteEntry(child.entryName); + } + }); + } + entryList.splice(entryList.indexOf(entry), 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + }, - /** - * Returns the zip file - * - * @return Buffer - */ - compressToBuffer: function () { - if (!loadedEntries) { - readEntries(); - } - if (entryList.length > 1) { - entryList.sort(function (a, b) { - var nameA = a.entryName.toLowerCase(); - var nameB = b.entryName.toLowerCase(); - if (nameA < nameB) { - return -1 - } - if (nameA > nameB) { - return 1 - } - return 0; - }); - } + /** + * Iterates and returns all nested files and directories of the given entry + * + * @param entry + * @return Array + */ + getEntryChildren: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + if (entry && entry.isDirectory) { + const list = []; + const name = entry.entryName; + const len = name.length; - var totalSize = 0, - dataBlock = [], - entryHeaders = [], - dindex = 0; + entryList.forEach(function (zipEntry) { + if (zipEntry.entryName.substr(0, len) === name) { + list.push(zipEntry); + } + }); + return list; + } + return []; + }, - mainHeader.size = 0; - mainHeader.offset = 0; + /** + * Returns the zip file + * + * @return Buffer + */ + compressToBuffer: function () { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); - entryList.forEach(function (entry) { - // compress data and set local and entry header accordingly. Reason why is called first - var compressedData = entry.getCompressedData(); - // data header - entry.header.offset = dindex; - var dataHeader = entry.header.dataHeaderToBinary(); - var entryNameLen = entry.rawEntryName.length; - var extra = entry.extra.toString(); - var postHeader = Buffer.alloc(entryNameLen + extra.length); - entry.rawEntryName.copy(postHeader, 0); - postHeader.fill(extra, entryNameLen); + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; - var dataLength = dataHeader.length + postHeader.length + compressedData.length; + mainHeader.size = 0; + mainHeader.offset = 0; - dindex += dataLength; + for (const entry of entryList) { + // compress data and set local and entry header accordingly. Reason why is called first + const compressedData = entry.getCompressedData(); + // 1. construct data header + entry.header.offset = dindex; + const dataHeader = entry.header.dataHeaderToBinary(); + const entryNameLen = entry.rawEntryName.length; + // 1.2. postheader - data after data header + const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); + entry.rawEntryName.copy(postHeader, 0); + postHeader.copy(entry.extra, entryNameLen); - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); + // 2. offsets + const dataLength = dataHeader.length + postHeader.length + compressedData.length; + dindex += dataLength; - var entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += (dataLength + entryHeader.length); - }); + // 3. 
store values in sequence + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; + // 4. construct entry header + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + // 5. update main header + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + } - dindex = 0; - var outBuffer = Buffer.alloc(totalSize); - dataBlock.forEach(function (content) { - content.copy(outBuffer, dindex); // write data blocks - dindex += content.length; - }); - entryHeaders.forEach(function (content) { - content.copy(outBuffer, dindex); // write central directory entries - dindex += content.length; - }); + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; - var mh = mainHeader.toBinary(); - if (_comment) { - Buffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + // write data blocks + for (const content of dataBlock) { + content.copy(outBuffer, dindex); + dindex += content.length; + } - mh.copy(outBuffer, dindex); // write main header + // write central directory entries + for (const content of entryHeaders) { + content.copy(outBuffer, dindex); + dindex += content.length; + } - return outBuffer - }, + // write main header + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + mh.copy(outBuffer, dindex); - toAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, /*Function*/onItemEnd) { - if (!loadedEntries) { - readEntries(); - } - if (entryList.length > 1) { - entryList.sort(function (a, b) { - var nameA = a.entryName.toLowerCase(); - var nameB = b.entryName.toLowerCase(); - if (nameA > nameB) { - return -1 - } - if (nameA < nameB) { - return 1 - } - return 0; - }); - } + return outBuffer; + }, - var totalSize = 0, - dataBlock = [], - entryHeaders = [], - dindex = 0; + toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { + try { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); - mainHeader.size = 0; - mainHeader.offset = 0; + const dataBlock = []; + const entryHeaders = []; + let totalSize = 0; + let dindex = 0; - var compress = function (entryList) { - var self = arguments.callee; - if (entryList.length) { - var entry = entryList.pop(); - var name = entry.entryName + entry.extra.toString(); - if (onItemStart) onItemStart(name); - entry.getCompressedDataAsync(function (compressedData) { - if (onItemEnd) onItemEnd(name); + mainHeader.size = 0; + mainHeader.offset = 0; - entry.header.offset = dindex; - // data header - var dataHeader = entry.header.dataHeaderToBinary(); - var postHeader; - try { - postHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+ - } catch(e){ - postHeader = new Buffer(name); // use deprecated method if alloc fails... 
- } - var dataLength = dataHeader.length + postHeader.length + compressedData.length; + const compress2Buffer = function (entryLists) { + if (entryLists.length) { + const entry = entryLists.pop(); + const name = entry.entryName + entry.extra.toString(); + if (onItemStart) onItemStart(name); + entry.getCompressedDataAsync(function (compressedData) { + if (onItemEnd) onItemEnd(name); - dindex += dataLength; + entry.header.offset = dindex; + // data header + const dataHeader = entry.header.dataHeaderToBinary(); + const postHeader = Buffer.alloc(name.length, name); + const dataLength = dataHeader.length + postHeader.length + compressedData.length; - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); + dindex += dataLength; - var entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += (dataLength + entryHeader.length); + dataBlock.push(dataHeader); + dataBlock.push(postHeader); + dataBlock.push(compressedData); - if (entryList.length) { - self(entryList); - } else { + const entryHeader = entry.packHeader(); + entryHeaders.push(entryHeader); + mainHeader.size += entryHeader.length; + totalSize += dataLength + entryHeader.length; + compress2Buffer(entryLists); + }); + } else { + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + dataBlock.forEach(function (content) { + content.copy(outBuffer, dindex); // write data blocks + dindex += content.length; + }); + entryHeaders.forEach(function (content) { + content.copy(outBuffer, dindex); // write central directory entries + dindex += content.length; + }); - dindex = 0; - var outBuffer = Buffer.alloc(totalSize); - dataBlock.forEach(function (content) { - content.copy(outBuffer, dindex); // write data blocks - dindex += content.length; - }); - entryHeaders.forEach(function (content) { - content.copy(outBuffer, dindex); // write central directory entries - dindex += content.length; - }); + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } - var mh = mainHeader.toBinary(); - if (_comment) { - _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } + mh.copy(outBuffer, dindex); // write main header - mh.copy(outBuffer, dindex); // write main header + onSuccess(outBuffer); + } + }; - onSuccess(outBuffer); - } - }); - } - }; - - compress(entryList); - } - } + compress2Buffer(entryList); + } catch (e) { + onFail(e); + } + } + }; }; @@ -10843,51 +11671,51 @@ module.exports.Collection = Hook.Collection /***/ 5549: /***/ ((module) => { -module.exports = addHook +module.exports = addHook; -function addHook (state, kind, name, hook) { - var orig = hook +function addHook(state, kind, name, hook) { + var orig = hook; if (!state.registry[name]) { - state.registry[name] = [] + state.registry[name] = []; } - if (kind === 'before') { + if (kind === "before") { hook = function (method, options) { return Promise.resolve() .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } + .then(method.bind(null, options)); + }; } - if (kind === 'after') { + if (kind === "after") { hook = function 
(method, options) { - var result + var result; return Promise.resolve() .then(method.bind(null, options)) .then(function (result_) { - result = result_ - return orig(result, options) + result = result_; + return orig(result, options); }) .then(function () { - return result - }) - } + return result; + }); + }; } - if (kind === 'error') { + if (kind === "error") { hook = function (method, options) { return Promise.resolve() .then(method.bind(null, options)) .catch(function (error) { - return orig(error, options) - }) - } + return orig(error, options); + }); + }; } state.registry[name].push({ hook: hook, - orig: orig - }) + orig: orig, + }); } @@ -10896,33 +11724,32 @@ function addHook (state, kind, name, hook) { /***/ 4670: /***/ ((module) => { -module.exports = register +module.exports = register; -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); } if (!options) { - options = {} + options = {}; } if (Array.isArray(name)) { return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() + return register.bind(null, state, name, callback, options); + }, method)(); } - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); } @@ -10931,1226 +11758,39 @@ function register (state, name, method, options) { /***/ 6819: /***/ ((module) => { -module.exports = removeHook +module.exports = removeHook; -function removeHook (state, name, method) { +function removeHook(state, name, method) { if (!state.registry[name]) { - return + return; } var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); if (index === -1) { - return + return; } - state.registry[name].splice(index, 1) + state.registry[name].splice(index, 1); } /***/ }), -/***/ 2286: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const { - V4MAPPED, - ADDRCONFIG, - ALL, - promises: { - Resolver: AsyncResolver - }, - lookup: dnsLookup -} = __nccwpck_require__(881); -const {promisify} = __nccwpck_require__(1669); -const os = __nccwpck_require__(2087); - -const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection'); -const kCacheableLookupInstance = Symbol('cacheableLookupInstance'); -const kExpires = Symbol('expires'); - -const supportsALL = typeof ALL === 'number'; - -const verifyAgent = agent => { - if (!(agent && typeof agent.createConnection === 'function')) { - throw new Error('Expected an Agent instance as the first argument'); - } -}; - -const map4to6 = entries => { - for (const entry of entries) { - if (entry.family === 6) { - continue; - } - - entry.address = `::ffff:${entry.address}`; - entry.family = 6; - } -}; - -const getIfaceInfo = () => { - let has4 = false; - let has6 = false; - - 
for (const device of Object.values(os.networkInterfaces())) { - for (const iface of device) { - if (iface.internal) { - continue; - } - - if (iface.family === 'IPv6') { - has6 = true; - } else { - has4 = true; - } - - if (has4 && has6) { - return {has4, has6}; - } - } - } - - return {has4, has6}; -}; - -const isIterable = map => { - return Symbol.iterator in map; -}; - -const ttl = {ttl: true}; -const all = {all: true}; - -class CacheableLookup { - constructor({ - cache = new Map(), - maxTtl = Infinity, - fallbackDuration = 3600, - errorTtl = 0.15, - resolver = new AsyncResolver(), - lookup = dnsLookup - } = {}) { - this.maxTtl = maxTtl; - this.errorTtl = errorTtl; - - this._cache = cache; - this._resolver = resolver; - this._dnsLookup = promisify(lookup); - - if (this._resolver instanceof AsyncResolver) { - this._resolve4 = this._resolver.resolve4.bind(this._resolver); - this._resolve6 = this._resolver.resolve6.bind(this._resolver); - } else { - this._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver)); - this._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver)); - } - - this._iface = getIfaceInfo(); - - this._pending = {}; - this._nextRemovalTime = false; - this._hostnamesToFallback = new Set(); - - if (fallbackDuration < 1) { - this._fallback = false; - } else { - this._fallback = true; - - const interval = setInterval(() => { - this._hostnamesToFallback.clear(); - }, fallbackDuration * 1000); - - /* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */ - if (interval.unref) { - interval.unref(); - } - } - - this.lookup = this.lookup.bind(this); - this.lookupAsync = this.lookupAsync.bind(this); - } - - set servers(servers) { - this.clear(); - - this._resolver.setServers(servers); - } - - get servers() { - return this._resolver.getServers(); - } - - lookup(hostname, options, callback) { - if (typeof options === 'function') { - callback = options; - options = {}; - } else if (typeof options === 'number') { - options = { - family: options - }; - } - - if (!callback) { - throw new Error('Callback must be a function.'); - } - - // eslint-disable-next-line promise/prefer-await-to-then - this.lookupAsync(hostname, options).then(result => { - if (options.all) { - callback(null, result); - } else { - callback(null, result.address, result.family, result.expires, result.ttl); - } - }, callback); - } - - async lookupAsync(hostname, options = {}) { - if (typeof options === 'number') { - options = { - family: options - }; - } - - let cached = await this.query(hostname); - - if (options.family === 6) { - const filtered = cached.filter(entry => entry.family === 6); - - if (options.hints & V4MAPPED) { - if ((supportsALL && options.hints & ALL) || filtered.length === 0) { - map4to6(cached); - } else { - cached = filtered; - } - } else { - cached = filtered; - } - } else if (options.family === 4) { - cached = cached.filter(entry => entry.family === 4); - } - - if (options.hints & ADDRCONFIG) { - const {_iface} = this; - cached = cached.filter(entry => entry.family === 6 ? 
_iface.has6 : _iface.has4); - } - - if (cached.length === 0) { - const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`); - error.code = 'ENOTFOUND'; - error.hostname = hostname; - - throw error; - } - - if (options.all) { - return cached; - } - - return cached[0]; - } - - async query(hostname) { - let cached = await this._cache.get(hostname); - - if (!cached) { - const pending = this._pending[hostname]; - - if (pending) { - cached = await pending; - } else { - const newPromise = this.queryAndCache(hostname); - this._pending[hostname] = newPromise; - - try { - cached = await newPromise; - } finally { - delete this._pending[hostname]; - } - } - } - - cached = cached.map(entry => { - return {...entry}; - }); - - return cached; - } - - async _resolve(hostname) { - const wrap = async promise => { - try { - return await promise; - } catch (error) { - if (error.code === 'ENODATA' || error.code === 'ENOTFOUND') { - return []; - } - - throw error; - } - }; - - // ANY is unsafe as it doesn't trigger new queries in the underlying server. - const [A, AAAA] = await Promise.all([ - this._resolve4(hostname, ttl), - this._resolve6(hostname, ttl) - ].map(promise => wrap(promise))); - - let aTtl = 0; - let aaaaTtl = 0; - let cacheTtl = 0; - - const now = Date.now(); - - for (const entry of A) { - entry.family = 4; - entry.expires = now + (entry.ttl * 1000); - - aTtl = Math.max(aTtl, entry.ttl); - } - - for (const entry of AAAA) { - entry.family = 6; - entry.expires = now + (entry.ttl * 1000); - - aaaaTtl = Math.max(aaaaTtl, entry.ttl); - } - - if (A.length > 0) { - if (AAAA.length > 0) { - cacheTtl = Math.min(aTtl, aaaaTtl); - } else { - cacheTtl = aTtl; - } - } else { - cacheTtl = aaaaTtl; - } - - return { - entries: [ - ...A, - ...AAAA - ], - cacheTtl - }; - } - - async _lookup(hostname) { - try { - const entries = await this._dnsLookup(hostname, { - all: true - }); - - return { - entries, - cacheTtl: 0 - }; - } catch (_) { - return { - entries: [], - cacheTtl: 0 - }; - } - } - - async _set(hostname, data, cacheTtl) { - if (this.maxTtl > 0 && cacheTtl > 0) { - cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000; - data[kExpires] = Date.now() + cacheTtl; - - try { - await this._cache.set(hostname, data, cacheTtl); - } catch (error) { - this.lookupAsync = async () => { - const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.'); - cacheError.cause = error; - - throw cacheError; - }; - } - - if (isIterable(this._cache)) { - this._tick(cacheTtl); - } - } - } - - async queryAndCache(hostname) { - if (this._hostnamesToFallback.has(hostname)) { - return this._dnsLookup(hostname, all); - } - - let query = await this._resolve(hostname); - - if (query.entries.length === 0 && this._fallback) { - query = await this._lookup(hostname); - - if (query.entries.length !== 0) { - // Use `dns.lookup(...)` for that particular hostname - this._hostnamesToFallback.add(hostname); - } - } - - const cacheTtl = query.entries.length === 0 ? 
this.errorTtl : query.cacheTtl; - await this._set(hostname, query.entries, cacheTtl); - - return query.entries; - } - - _tick(ms) { - const nextRemovalTime = this._nextRemovalTime; - - if (!nextRemovalTime || ms < nextRemovalTime) { - clearTimeout(this._removalTimeout); - - this._nextRemovalTime = ms; - - this._removalTimeout = setTimeout(() => { - this._nextRemovalTime = false; - - let nextExpiry = Infinity; - - const now = Date.now(); - - for (const [hostname, entries] of this._cache) { - const expires = entries[kExpires]; - - if (now >= expires) { - this._cache.delete(hostname); - } else if (expires < nextExpiry) { - nextExpiry = expires; - } - } - - if (nextExpiry !== Infinity) { - this._tick(nextExpiry - now); - } - }, ms); - - /* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */ - if (this._removalTimeout.unref) { - this._removalTimeout.unref(); - } - } - } - - install(agent) { - verifyAgent(agent); - - if (kCacheableLookupCreateConnection in agent) { - throw new Error('CacheableLookup has been already installed'); - } - - agent[kCacheableLookupCreateConnection] = agent.createConnection; - agent[kCacheableLookupInstance] = this; - - agent.createConnection = (options, callback) => { - if (!('lookup' in options)) { - options.lookup = this.lookup; - } - - return agent[kCacheableLookupCreateConnection](options, callback); - }; - } - - uninstall(agent) { - verifyAgent(agent); - - if (agent[kCacheableLookupCreateConnection]) { - if (agent[kCacheableLookupInstance] !== this) { - throw new Error('The agent is not owned by this CacheableLookup instance'); - } - - agent.createConnection = agent[kCacheableLookupCreateConnection]; - - delete agent[kCacheableLookupCreateConnection]; - delete agent[kCacheableLookupInstance]; - } - } - - updateInterfaceInfo() { - const {_iface} = this; - - this._iface = getIfaceInfo(); - - if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) { - this._cache.clear(); - } - } - - clear(hostname) { - if (hostname) { - this._cache.delete(hostname); - return; - } - - this._cache.clear(); - } -} - -module.exports = CacheableLookup; -module.exports.default = CacheableLookup; - - -/***/ }), - -/***/ 4340: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {PassThrough: PassThroughStream} = __nccwpck_require__(2413); - -module.exports = options => { - options = {...options}; - - const {array} = options; - let {encoding} = options; - const isBuffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || isBuffer); - } else { - encoding = encoding || 'utf8'; - } - - if (isBuffer) { - encoding = null; - } - - const stream = new PassThroughStream({objectMode}); - - if (encoding) { - stream.setEncoding(encoding); - } - - let length = 0; - const chunks = []; - - stream.on('data', chunk => { - chunks.push(chunk); - - if (objectMode) { - length = chunks.length; - } else { - length += chunk.length; - } - }); - - stream.getBufferedValue = () => { - if (array) { - return chunks; - } - - return isBuffer ? 
Buffer.concat(chunks, length) : chunks.join(''); - }; - - stream.getBufferedLength = () => length; - - return stream; -}; - - -/***/ }), - -/***/ 7040: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {constants: BufferConstants} = __nccwpck_require__(4293); -const pump = __nccwpck_require__(8341); -const bufferStream = __nccwpck_require__(4340); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -async function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = { - maxBuffer: Infinity, - ...options - }; - - const {maxBuffer} = options; - - let stream; - await new Promise((resolve, reject) => { - const rejectPromise = error => { - // Don't retrieve an oversized buffer. - if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { - error.bufferedData = stream.getBufferedValue(); - } - - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }); - - return stream.getBufferedValue(); -} - -module.exports = getStream; -// TODO: Remove this for the next major release -module.exports.default = getStream; -module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); -module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); -module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), - -/***/ 8116: +/***/ 610: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const EventEmitter = __nccwpck_require__(8614); -const urlLib = __nccwpck_require__(8835); -const normalizeUrl = __nccwpck_require__(7952); -const getStream = __nccwpck_require__(7040); -const CachePolicy = __nccwpck_require__(1002); -const Response = __nccwpck_require__(9004); -const lowercaseKeys = __nccwpck_require__(9662); -const cloneResponse = __nccwpck_require__(1312); -const Keyv = __nccwpck_require__(1531); - -class CacheableRequest { - constructor(request, cacheAdapter) { - if (typeof request !== 'function') { - throw new TypeError('Parameter `request` must be a function'); - } - - this.cache = new Keyv({ - uri: typeof cacheAdapter === 'string' && cacheAdapter, - store: typeof cacheAdapter !== 'string' && cacheAdapter, - namespace: 'cacheable-request' - }); - - return this.createCacheableRequest(request); - } - - createCacheableRequest(request) { - return (opts, cb) => { - let url; - if (typeof opts === 'string') { - url = normalizeUrlObject(urlLib.parse(opts)); - opts = {}; - } else if (opts instanceof urlLib.URL) { - url = normalizeUrlObject(urlLib.parse(opts.toString())); - opts = {}; - } else { - const [pathname, ...searchParts] = (opts.path || '').split('?'); - const search = searchParts.length > 0 ? 
- `?${searchParts.join('?')}` : - ''; - url = normalizeUrlObject({ ...opts, pathname, search }); - } - - opts = { - headers: {}, - method: 'GET', - cache: true, - strictTtl: false, - automaticFailover: false, - ...opts, - ...urlObjectToRequestOptions(url) - }; - opts.headers = lowercaseKeys(opts.headers); - - const ee = new EventEmitter(); - const normalizedUrlString = normalizeUrl( - urlLib.format(url), - { - stripWWW: false, - removeTrailingSlash: false, - stripAuthentication: false - } - ); - const key = `${opts.method}:${normalizedUrlString}`; - let revalidate = false; - let madeRequest = false; - - const makeRequest = opts => { - madeRequest = true; - let requestErrored = false; - let requestErrorCallback; - - const requestErrorPromise = new Promise(resolve => { - requestErrorCallback = () => { - if (!requestErrored) { - requestErrored = true; - resolve(); - } - }; - }); - - const handler = response => { - if (revalidate && !opts.forceRefresh) { - response.status = response.statusCode; - const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); - if (!revalidatedPolicy.modified) { - const headers = revalidatedPolicy.policy.responseHeaders(); - response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url); - response.cachePolicy = revalidatedPolicy.policy; - response.fromCache = true; - } - } - - if (!response.fromCache) { - response.cachePolicy = new CachePolicy(opts, response, opts); - response.fromCache = false; - } - - let clonedResponse; - if (opts.cache && response.cachePolicy.storable()) { - clonedResponse = cloneResponse(response); - - (async () => { - try { - const bodyPromise = getStream.buffer(response); - - await Promise.race([ - requestErrorPromise, - new Promise(resolve => response.once('end', resolve)) - ]); - - if (requestErrored) { - return; - } - - const body = await bodyPromise; - - const value = { - cachePolicy: response.cachePolicy.toObject(), - url: response.url, - statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, - body - }; - - let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; - if (opts.maxTtl) { - ttl = ttl ? Math.min(ttl, opts.maxTtl) : opts.maxTtl; - } - - await this.cache.set(key, value, ttl); - } catch (error) { - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); - } else if (opts.cache && revalidate) { - (async () => { - try { - await this.cache.delete(key); - } catch (error) { - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); - } - - ee.emit('response', clonedResponse || response); - if (typeof cb === 'function') { - cb(clonedResponse || response); - } - }; - - try { - const req = request(opts, handler); - req.once('error', requestErrorCallback); - req.once('abort', requestErrorCallback); - ee.emit('request', req); - } catch (error) { - ee.emit('error', new CacheableRequest.RequestError(error)); - } - }; - - (async () => { - const get = async opts => { - await Promise.resolve(); - - const cacheEntry = opts.cache ? 
await this.cache.get(key) : undefined; - if (typeof cacheEntry === 'undefined') { - return makeRequest(opts); - } - - const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); - if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) { - const headers = policy.responseHeaders(); - const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); - response.cachePolicy = policy; - response.fromCache = true; - - ee.emit('response', response); - if (typeof cb === 'function') { - cb(response); - } - } else { - revalidate = cacheEntry; - opts.headers = policy.revalidationHeaders(opts); - makeRequest(opts); - } - }; - - const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error)); - this.cache.once('error', errorHandler); - ee.on('response', () => this.cache.removeListener('error', errorHandler)); - - try { - await get(opts); - } catch (error) { - if (opts.automaticFailover && !madeRequest) { - makeRequest(opts); - } - - ee.emit('error', new CacheableRequest.CacheError(error)); - } - })(); - - return ee; - }; - } -} - -function urlObjectToRequestOptions(url) { - const options = { ...url }; - options.path = `${url.pathname || '/'}${url.search || ''}`; - delete options.pathname; - delete options.search; - return options; -} - -function normalizeUrlObject(url) { - // If url was parsed by url.parse or new URL: - // - hostname will be set - // - host will be hostname[:port] - // - port will be set if it was explicit in the parsed string - // Otherwise, url was from request options: - // - hostname or host may be set - // - host shall not have port encoded - return { - protocol: url.protocol, - auth: url.auth, - hostname: url.hostname || url.host || 'localhost', - port: url.port, - pathname: url.pathname, - search: url.search - }; -} - -CacheableRequest.RequestError = class extends Error { - constructor(error) { - super(error.message); - this.name = 'RequestError'; - Object.assign(this, error); - } -}; - -CacheableRequest.CacheError = class extends Error { - constructor(error) { - super(error.message); - this.name = 'CacheError'; - Object.assign(this, error); - } -}; - -module.exports = CacheableRequest; - - -/***/ }), - -/***/ 1312: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const PassThrough = __nccwpck_require__(2413).PassThrough; -const mimicResponse = __nccwpck_require__(2610); - -const cloneResponse = response => { - if (!(response && response.pipe)) { - throw new TypeError('Parameter `response` must be a response stream.'); - } - - const clone = new PassThrough(); - mimicResponse(response, clone); - - return response.pipe(clone); -}; - -module.exports = cloneResponse; - - -/***/ }), - -/***/ 2391: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -const {Transform, PassThrough} = __nccwpck_require__(2413); -const zlib = __nccwpck_require__(8761); -const mimicResponse = __nccwpck_require__(3877); - -module.exports = response => { - const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); - - if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { - return response; - } - - // TODO: Remove this when targeting Node.js 12. 
- const isBrotli = contentEncoding === 'br'; - if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { - response.destroy(new Error('Brotli is not supported on Node.js < 12')); - return response; - } - - let isEmpty = true; - - const checker = new Transform({ - transform(data, _encoding, callback) { - isEmpty = false; - - callback(null, data); - }, - - flush(callback) { - callback(); - } - }); - - const finalStream = new PassThrough({ - autoDestroy: false, - destroy(error, callback) { - response.destroy(); - - callback(error); - } - }); - - const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); - - decompressStream.once('error', error => { - if (isEmpty && !response.readable) { - finalStream.end(); - return; - } - - finalStream.destroy(error); - }); - - mimicResponse(response, finalStream); - response.pipe(checker).pipe(decompressStream).pipe(finalStream); - - return finalStream; -}; - - -/***/ }), - -/***/ 3877: -/***/ ((module) => { - -"use strict"; - - -// We define these manually to ensure they're always copied -// even if they would move up the prototype chain -// https://nodejs.org/api/http.html#http_class_http_incomingmessage -const knownProperties = [ - 'aborted', - 'complete', - 'headers', - 'httpVersion', - 'httpVersionMinor', - 'httpVersionMajor', - 'method', - 'rawHeaders', - 'rawTrailers', - 'setTimeout', - 'socket', - 'statusCode', - 'statusMessage', - 'trailers', - 'url' -]; - -module.exports = (fromStream, toStream) => { - if (toStream._readableState.autoDestroy) { - throw new Error('The second stream must have the `autoDestroy` option set to `false`'); - } - - const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); - - const properties = {}; - - for (const property of fromProperties) { - // Don't overwrite existing properties. - if (property in toStream) { - continue; - } - - properties[property] = { - get() { - const value = fromStream[property]; - const isFunction = typeof value === 'function'; - - return isFunction ? 
value.bind(fromStream) : value; - }, - set(value) { - fromStream[property] = value; - }, - enumerable: true, - configurable: false - }; - } - - Object.defineProperties(toStream, properties); - - fromStream.once('aborted', () => { - toStream.destroy(); - - toStream.emit('aborted'); - }); - - fromStream.once('close', () => { - if (fromStream.complete) { - if (toStream.readable) { - toStream.once('end', () => { - toStream.emit('close'); - }); - } else { - toStream.emit('close'); - } - } else { - toStream.emit('close'); - } - }); - - return toStream; -}; - - -/***/ }), - -/***/ 6214: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tls_1 = __nccwpck_require__(4016); -const deferToConnect = (socket, fn) => { - let listeners; - if (typeof fn === 'function') { - const connect = fn; - listeners = { connect }; - } - else { - listeners = fn; - } - const hasConnectListener = typeof listeners.connect === 'function'; - const hasSecureConnectListener = typeof listeners.secureConnect === 'function'; - const hasCloseListener = typeof listeners.close === 'function'; - const onConnect = () => { - if (hasConnectListener) { - listeners.connect(); - } - if (socket instanceof tls_1.TLSSocket && hasSecureConnectListener) { - if (socket.authorized) { - listeners.secureConnect(); - } - else if (!socket.authorizationError) { - socket.once('secureConnect', listeners.secureConnect); - } - } - if (hasCloseListener) { - socket.once('close', listeners.close); - } - }; - if (socket.writable && !socket.connecting) { - onConnect(); - } - else if (socket.connecting) { - socket.once('connect', onConnect); - } - else if (socket.destroyed && hasCloseListener) { - listeners.close(socket._hadError); - } -}; -exports.default = deferToConnect; -// For CommonJS default export support -module.exports = deferToConnect; -module.exports.default = deferToConnect; - - -/***/ }), - -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 1205: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var once = __nccwpck_require__(1223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - 
var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), - -/***/ 5582: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const stringify = __nccwpck_require__(4810); -const compile = __nccwpck_require__(7123); -const expand = __nccwpck_require__(6944); -const parse = __nccwpck_require__(9889); +const stringify = __nccwpck_require__(8750); +const compile = __nccwpck_require__(9434); +const expand = __nccwpck_require__(5873); +const parse = __nccwpck_require__(6477); /** * Expand the given pattern or create a regex-compatible string. 
@@ -12319,14 +11959,14 @@ module.exports = braces; /***/ }), -/***/ 7123: +/***/ 9434: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const fill = __nccwpck_require__(791); -const utils = __nccwpck_require__(7691); +const fill = __nccwpck_require__(6330); +const utils = __nccwpck_require__(5207); const compile = (ast, options = {}) => { let walk = (node, parent = {}) => { @@ -12384,7 +12024,7 @@ module.exports = compile; /***/ }), -/***/ 5412: +/***/ 8774: /***/ ((module) => { "use strict"; @@ -12449,15 +12089,15 @@ module.exports = { /***/ }), -/***/ 6944: +/***/ 5873: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const fill = __nccwpck_require__(791); -const stringify = __nccwpck_require__(4810); -const utils = __nccwpck_require__(7691); +const fill = __nccwpck_require__(6330); +const stringify = __nccwpck_require__(8750); +const utils = __nccwpck_require__(5207); const append = (queue = '', stash = '', enclose = false) => { let result = []; @@ -12570,13 +12210,13 @@ module.exports = expand; /***/ }), -/***/ 9889: +/***/ 6477: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const stringify = __nccwpck_require__(4810); +const stringify = __nccwpck_require__(8750); /** * Constants @@ -12598,7 +12238,7 @@ const { CHAR_SINGLE_QUOTE, /* ' */ CHAR_NO_BREAK_SPACE, CHAR_ZERO_WIDTH_NOBREAK_SPACE -} = __nccwpck_require__(5412); +} = __nccwpck_require__(8774); /** * parse @@ -12911,13 +12551,13 @@ module.exports = parse; /***/ }), -/***/ 4810: +/***/ 8750: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const utils = __nccwpck_require__(7691); +const utils = __nccwpck_require__(5207); module.exports = (ast, options = {}) => { let stringify = (node, parent = {}) => { @@ -12951,7 +12591,7 @@ module.exports = (ast, options = {}) => { /***/ }), -/***/ 7691: +/***/ 5207: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -13071,7 +12711,2881 @@ exports.flatten = (...args) => { /***/ }), -/***/ 791: +/***/ 2286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const { + V4MAPPED, + ADDRCONFIG, + ALL, + promises: { + Resolver: AsyncResolver + }, + lookup: dnsLookup +} = __nccwpck_require__(9523); +const {promisify} = __nccwpck_require__(3837); +const os = __nccwpck_require__(2037); + +const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection'); +const kCacheableLookupInstance = Symbol('cacheableLookupInstance'); +const kExpires = Symbol('expires'); + +const supportsALL = typeof ALL === 'number'; + +const verifyAgent = agent => { + if (!(agent && typeof agent.createConnection === 'function')) { + throw new Error('Expected an Agent instance as the first argument'); + } +}; + +const map4to6 = entries => { + for (const entry of entries) { + if (entry.family === 6) { + continue; + } + + entry.address = `::ffff:${entry.address}`; + entry.family = 6; + } +}; + +const getIfaceInfo = () => { + let has4 = false; + let has6 = false; + + for (const device of Object.values(os.networkInterfaces())) { + for (const iface of device) { + if (iface.internal) { + continue; + } + + if (iface.family === 'IPv6') { + has6 = true; + } else { + has4 = true; + } + + if (has4 && has6) { + return {has4, has6}; + } + } + } + + return {has4, has6}; +}; + +const isIterable = map => { + return Symbol.iterator in map; +}; + +const ignoreNoResultErrors = dnsPromise => { + return dnsPromise.catch(error => { + if ( + 
error.code === 'ENODATA' || + error.code === 'ENOTFOUND' || + error.code === 'ENOENT' // Windows: name exists, but not this record type + ) { + return []; + } + + throw error; + }); +}; + +const ttl = {ttl: true}; +const all = {all: true}; +const all4 = {all: true, family: 4}; +const all6 = {all: true, family: 6}; + +class CacheableLookup { + constructor({ + cache = new Map(), + maxTtl = Infinity, + fallbackDuration = 3600, + errorTtl = 0.15, + resolver = new AsyncResolver(), + lookup = dnsLookup + } = {}) { + this.maxTtl = maxTtl; + this.errorTtl = errorTtl; + + this._cache = cache; + this._resolver = resolver; + this._dnsLookup = lookup && promisify(lookup); + + if (this._resolver instanceof AsyncResolver) { + this._resolve4 = this._resolver.resolve4.bind(this._resolver); + this._resolve6 = this._resolver.resolve6.bind(this._resolver); + } else { + this._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver)); + this._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver)); + } + + this._iface = getIfaceInfo(); + + this._pending = {}; + this._nextRemovalTime = false; + this._hostnamesToFallback = new Set(); + + this.fallbackDuration = fallbackDuration; + + if (fallbackDuration > 0) { + const interval = setInterval(() => { + this._hostnamesToFallback.clear(); + }, fallbackDuration * 1000); + + /* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */ + if (interval.unref) { + interval.unref(); + } + + this._fallbackInterval = interval; + } + + this.lookup = this.lookup.bind(this); + this.lookupAsync = this.lookupAsync.bind(this); + } + + set servers(servers) { + this.clear(); + + this._resolver.setServers(servers); + } + + get servers() { + return this._resolver.getServers(); + } + + lookup(hostname, options, callback) { + if (typeof options === 'function') { + callback = options; + options = {}; + } else if (typeof options === 'number') { + options = { + family: options + }; + } + + if (!callback) { + throw new Error('Callback must be a function.'); + } + + // eslint-disable-next-line promise/prefer-await-to-then + this.lookupAsync(hostname, options).then(result => { + if (options.all) { + callback(null, result); + } else { + callback(null, result.address, result.family, result.expires, result.ttl, result.source); + } + }, callback); + } + + async lookupAsync(hostname, options = {}) { + if (typeof options === 'number') { + options = { + family: options + }; + } + + let cached = await this.query(hostname); + + if (options.family === 6) { + const filtered = cached.filter(entry => entry.family === 6); + + if (options.hints & V4MAPPED) { + if ((supportsALL && options.hints & ALL) || filtered.length === 0) { + map4to6(cached); + } else { + cached = filtered; + } + } else { + cached = filtered; + } + } else if (options.family === 4) { + cached = cached.filter(entry => entry.family === 4); + } + + if (options.hints & ADDRCONFIG) { + const {_iface} = this; + cached = cached.filter(entry => entry.family === 6 ? 
_iface.has6 : _iface.has4); + } + + if (cached.length === 0) { + const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`); + error.code = 'ENOTFOUND'; + error.hostname = hostname; + + throw error; + } + + if (options.all) { + return cached; + } + + return cached[0]; + } + + async query(hostname) { + let source = 'cache'; + let cached = await this._cache.get(hostname); + + if (!cached) { + const pending = this._pending[hostname]; + + if (pending) { + cached = await pending; + } else { + source = 'query'; + const newPromise = this.queryAndCache(hostname); + this._pending[hostname] = newPromise; + + try { + cached = await newPromise; + } finally { + delete this._pending[hostname]; + } + } + } + + cached = cached.map(entry => { + return {...entry, source}; + }); + + return cached; + } + + async _resolve(hostname) { + // ANY is unsafe as it doesn't trigger new queries in the underlying server. + const [A, AAAA] = await Promise.all([ + ignoreNoResultErrors(this._resolve4(hostname, ttl)), + ignoreNoResultErrors(this._resolve6(hostname, ttl)) + ]); + + let aTtl = 0; + let aaaaTtl = 0; + let cacheTtl = 0; + + const now = Date.now(); + + for (const entry of A) { + entry.family = 4; + entry.expires = now + (entry.ttl * 1000); + + aTtl = Math.max(aTtl, entry.ttl); + } + + for (const entry of AAAA) { + entry.family = 6; + entry.expires = now + (entry.ttl * 1000); + + aaaaTtl = Math.max(aaaaTtl, entry.ttl); + } + + if (A.length > 0) { + if (AAAA.length > 0) { + cacheTtl = Math.min(aTtl, aaaaTtl); + } else { + cacheTtl = aTtl; + } + } else { + cacheTtl = aaaaTtl; + } + + return { + entries: [ + ...A, + ...AAAA + ], + cacheTtl + }; + } + + async _lookup(hostname) { + try { + const [A, AAAA] = await Promise.all([ + // Passing {all: true} doesn't return all IPv4 and IPv6 entries. + // See https://github.com/szmarczak/cacheable-lookup/issues/42 + ignoreNoResultErrors(this._dnsLookup(hostname, all4)), + ignoreNoResultErrors(this._dnsLookup(hostname, all6)) + ]); + + return { + entries: [ + ...A, + ...AAAA + ], + cacheTtl: 0 + }; + } catch { + return { + entries: [], + cacheTtl: 0 + }; + } + } + + async _set(hostname, data, cacheTtl) { + if (this.maxTtl > 0 && cacheTtl > 0) { + cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000; + data[kExpires] = Date.now() + cacheTtl; + + try { + await this._cache.set(hostname, data, cacheTtl); + } catch (error) { + this.lookupAsync = async () => { + const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.'); + cacheError.cause = error; + + throw cacheError; + }; + } + + if (isIterable(this._cache)) { + this._tick(cacheTtl); + } + } + } + + async queryAndCache(hostname) { + if (this._hostnamesToFallback.has(hostname)) { + return this._dnsLookup(hostname, all); + } + + let query = await this._resolve(hostname); + + if (query.entries.length === 0 && this._dnsLookup) { + query = await this._lookup(hostname); + + if (query.entries.length !== 0 && this.fallbackDuration > 0) { + // Use `dns.lookup(...)` for that particular hostname + this._hostnamesToFallback.add(hostname); + } + } + + const cacheTtl = query.entries.length === 0 ? 
this.errorTtl : query.cacheTtl; + await this._set(hostname, query.entries, cacheTtl); + + return query.entries; + } + + _tick(ms) { + const nextRemovalTime = this._nextRemovalTime; + + if (!nextRemovalTime || ms < nextRemovalTime) { + clearTimeout(this._removalTimeout); + + this._nextRemovalTime = ms; + + this._removalTimeout = setTimeout(() => { + this._nextRemovalTime = false; + + let nextExpiry = Infinity; + + const now = Date.now(); + + for (const [hostname, entries] of this._cache) { + const expires = entries[kExpires]; + + if (now >= expires) { + this._cache.delete(hostname); + } else if (expires < nextExpiry) { + nextExpiry = expires; + } + } + + if (nextExpiry !== Infinity) { + this._tick(nextExpiry - now); + } + }, ms); + + /* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */ + if (this._removalTimeout.unref) { + this._removalTimeout.unref(); + } + } + } + + install(agent) { + verifyAgent(agent); + + if (kCacheableLookupCreateConnection in agent) { + throw new Error('CacheableLookup has been already installed'); + } + + agent[kCacheableLookupCreateConnection] = agent.createConnection; + agent[kCacheableLookupInstance] = this; + + agent.createConnection = (options, callback) => { + if (!('lookup' in options)) { + options.lookup = this.lookup; + } + + return agent[kCacheableLookupCreateConnection](options, callback); + }; + } + + uninstall(agent) { + verifyAgent(agent); + + if (agent[kCacheableLookupCreateConnection]) { + if (agent[kCacheableLookupInstance] !== this) { + throw new Error('The agent is not owned by this CacheableLookup instance'); + } + + agent.createConnection = agent[kCacheableLookupCreateConnection]; + + delete agent[kCacheableLookupCreateConnection]; + delete agent[kCacheableLookupInstance]; + } + } + + updateInterfaceInfo() { + const {_iface} = this; + + this._iface = getIfaceInfo(); + + if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) { + this._cache.clear(); + } + } + + clear(hostname) { + if (hostname) { + this._cache.delete(hostname); + return; + } + + this._cache.clear(); + } +} + +module.exports = CacheableLookup; +module.exports["default"] = CacheableLookup; + + +/***/ }), + +/***/ 4340: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); + +module.exports = options => { + options = {...options}; + + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + + if (isBuffer) { + encoding = null; + } + + const stream = new PassThroughStream({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + let length = 0; + const chunks = []; + + stream.on('data', chunk => { + chunks.push(chunk); + + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? 
Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; +}; + + +/***/ }), + +/***/ 7040: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {constants: BufferConstants} = __nccwpck_require__(4300); +const pump = __nccwpck_require__(8341); +const bufferStream = __nccwpck_require__(4340); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + + let stream; + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. + if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +// TODO: Remove this for the next major release +module.exports["default"] = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; + + +/***/ }), + +/***/ 5993: +/***/ ((module) => { + +"use strict"; + +module.exports = object => { + const result = {}; + + for (const [key, value] of Object.entries(object)) { + result[key.toLowerCase()] = value; + } + + return result; +}; + + +/***/ }), + +/***/ 8116: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = __nccwpck_require__(2361); +const urlLib = __nccwpck_require__(7310); +const normalizeUrl = __nccwpck_require__(7952); +const getStream = __nccwpck_require__(7040); +const CachePolicy = __nccwpck_require__(1002); +const Response = __nccwpck_require__(9004); +const lowercaseKeys = __nccwpck_require__(5993); +const cloneResponse = __nccwpck_require__(1312); +const Keyv = __nccwpck_require__(1531); + +class CacheableRequest { + constructor(request, cacheAdapter) { + if (typeof request !== 'function') { + throw new TypeError('Parameter `request` must be a function'); + } + + this.cache = new Keyv({ + uri: typeof cacheAdapter === 'string' && cacheAdapter, + store: typeof cacheAdapter !== 'string' && cacheAdapter, + namespace: 'cacheable-request' + }); + + return this.createCacheableRequest(request); + } + + createCacheableRequest(request) { + return (opts, cb) => { + let url; + if (typeof opts === 'string') { + url = normalizeUrlObject(urlLib.parse(opts)); + opts = {}; + } else if (opts instanceof urlLib.URL) { + url = normalizeUrlObject(urlLib.parse(opts.toString())); + opts = {}; + } else { + const [pathname, ...searchParts] = (opts.path || '').split('?'); + const search = searchParts.length > 0 ? 
+ `?${searchParts.join('?')}` : + ''; + url = normalizeUrlObject({ ...opts, pathname, search }); + } + + opts = { + headers: {}, + method: 'GET', + cache: true, + strictTtl: false, + automaticFailover: false, + ...opts, + ...urlObjectToRequestOptions(url) + }; + opts.headers = lowercaseKeys(opts.headers); + + const ee = new EventEmitter(); + const normalizedUrlString = normalizeUrl( + urlLib.format(url), + { + stripWWW: false, + removeTrailingSlash: false, + stripAuthentication: false + } + ); + const key = `${opts.method}:${normalizedUrlString}`; + let revalidate = false; + let madeRequest = false; + + const makeRequest = opts => { + madeRequest = true; + let requestErrored = false; + let requestErrorCallback; + + const requestErrorPromise = new Promise(resolve => { + requestErrorCallback = () => { + if (!requestErrored) { + requestErrored = true; + resolve(); + } + }; + }); + + const handler = response => { + if (revalidate && !opts.forceRefresh) { + response.status = response.statusCode; + const revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response); + if (!revalidatedPolicy.modified) { + const headers = revalidatedPolicy.policy.responseHeaders(); + response = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url); + response.cachePolicy = revalidatedPolicy.policy; + response.fromCache = true; + } + } + + if (!response.fromCache) { + response.cachePolicy = new CachePolicy(opts, response, opts); + response.fromCache = false; + } + + let clonedResponse; + if (opts.cache && response.cachePolicy.storable()) { + clonedResponse = cloneResponse(response); + + (async () => { + try { + const bodyPromise = getStream.buffer(response); + + await Promise.race([ + requestErrorPromise, + new Promise(resolve => response.once('end', resolve)) + ]); + + if (requestErrored) { + return; + } + + const body = await bodyPromise; + + const value = { + cachePolicy: response.cachePolicy.toObject(), + url: response.url, + statusCode: response.fromCache ? revalidate.statusCode : response.statusCode, + body + }; + + let ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined; + if (opts.maxTtl) { + ttl = ttl ? Math.min(ttl, opts.maxTtl) : opts.maxTtl; + } + + await this.cache.set(key, value, ttl); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } else if (opts.cache && revalidate) { + (async () => { + try { + await this.cache.delete(key); + } catch (error) { + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + } + + ee.emit('response', clonedResponse || response); + if (typeof cb === 'function') { + cb(clonedResponse || response); + } + }; + + try { + const req = request(opts, handler); + req.once('error', requestErrorCallback); + req.once('abort', requestErrorCallback); + ee.emit('request', req); + } catch (error) { + ee.emit('error', new CacheableRequest.RequestError(error)); + } + }; + + (async () => { + const get = async opts => { + await Promise.resolve(); + + const cacheEntry = opts.cache ? 
await this.cache.get(key) : undefined; + if (typeof cacheEntry === 'undefined') { + return makeRequest(opts); + } + + const policy = CachePolicy.fromObject(cacheEntry.cachePolicy); + if (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) { + const headers = policy.responseHeaders(); + const response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url); + response.cachePolicy = policy; + response.fromCache = true; + + ee.emit('response', response); + if (typeof cb === 'function') { + cb(response); + } + } else { + revalidate = cacheEntry; + opts.headers = policy.revalidationHeaders(opts); + makeRequest(opts); + } + }; + + const errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error)); + this.cache.once('error', errorHandler); + ee.on('response', () => this.cache.removeListener('error', errorHandler)); + + try { + await get(opts); + } catch (error) { + if (opts.automaticFailover && !madeRequest) { + makeRequest(opts); + } + + ee.emit('error', new CacheableRequest.CacheError(error)); + } + })(); + + return ee; + }; + } +} + +function urlObjectToRequestOptions(url) { + const options = { ...url }; + options.path = `${url.pathname || '/'}${url.search || ''}`; + delete options.pathname; + delete options.search; + return options; +} + +function normalizeUrlObject(url) { + // If url was parsed by url.parse or new URL: + // - hostname will be set + // - host will be hostname[:port] + // - port will be set if it was explicit in the parsed string + // Otherwise, url was from request options: + // - hostname or host may be set + // - host shall not have port encoded + return { + protocol: url.protocol, + auth: url.auth, + hostname: url.hostname || url.host || 'localhost', + port: url.port, + pathname: url.pathname, + search: url.search + }; +} + +CacheableRequest.RequestError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'RequestError'; + Object.assign(this, error); + } +}; + +CacheableRequest.CacheError = class extends Error { + constructor(error) { + super(error.message); + this.name = 'CacheError'; + Object.assign(this, error); + } +}; + +module.exports = CacheableRequest; + + +/***/ }), + +/***/ 1312: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const PassThrough = (__nccwpck_require__(2781).PassThrough); +const mimicResponse = __nccwpck_require__(2610); + +const cloneResponse = response => { + if (!(response && response.pipe)) { + throw new TypeError('Parameter `response` must be a response stream.'); + } + + const clone = new PassThrough(); + mimicResponse(response, clone); + + return response.pipe(clone); +}; + +module.exports = cloneResponse; + + +/***/ }), + +/***/ 5728: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { promisify } = __nccwpck_require__(3837) +const JSONB = __nccwpck_require__(2820) +const zlib = __nccwpck_require__(9796) + +const mergeOptions = __nccwpck_require__(4968) + +const compress = promisify(zlib.brotliCompress) + +const decompress = promisify(zlib.brotliDecompress) + +const identity = val => val + +const createCompress = ({ + enable = true, + serialize = JSONB.stringify, + deserialize = JSONB.parse, + compressOptions, + decompressOptions +} = {}) => { + if (!enable) { + return { serialize, deserialize, decompress: identity, compress: identity } + } + + return { + serialize, + deserialize, + compress: async (data, options = {}) => { + if (data === undefined) return data + 
const serializedData = serialize(data) + return compress(serializedData, mergeOptions(compressOptions, options)) + }, + decompress: async (data, options = {}) => { + if (data === undefined) return data + return deserialize( + await decompress(data, mergeOptions(decompressOptions, options)) + ) + } + } +} + +module.exports = createCompress +module.exports.stringify = JSONB.stringify +module.exports.parse = JSONB.parse + + +/***/ }), + +/***/ 4968: +/***/ ((module) => { + +"use strict"; + + +module.exports = (defaultOptions = {}, options = {}) => { + const params = { + ...(defaultOptions.params || {}), + ...(options.params || {}) + } + + return { + ...defaultOptions, + ...options, + ...(Object.keys(params).length + ? { + params + } + : {}) + } +} + + +/***/ }), + +/***/ 2391: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {Transform, PassThrough} = __nccwpck_require__(2781); +const zlib = __nccwpck_require__(9796); +const mimicResponse = __nccwpck_require__(3877); + +module.exports = response => { + const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); + + if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { + return response; + } + + // TODO: Remove this when targeting Node.js 12. + const isBrotli = contentEncoding === 'br'; + if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { + response.destroy(new Error('Brotli is not supported on Node.js < 12')); + return response; + } + + let isEmpty = true; + + const checker = new Transform({ + transform(data, _encoding, callback) { + isEmpty = false; + + callback(null, data); + }, + + flush(callback) { + callback(); + } + }); + + const finalStream = new PassThrough({ + autoDestroy: false, + destroy(error, callback) { + response.destroy(); + + callback(error); + } + }); + + const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); + + decompressStream.once('error', error => { + if (isEmpty && !response.readable) { + finalStream.end(); + return; + } + + finalStream.destroy(error); + }); + + mimicResponse(response, finalStream); + response.pipe(checker).pipe(decompressStream).pipe(finalStream); + + return finalStream; +}; + + +/***/ }), + +/***/ 3877: +/***/ ((module) => { + +"use strict"; + + +// We define these manually to ensure they're always copied +// even if they would move up the prototype chain +// https://nodejs.org/api/http.html#http_class_http_incomingmessage +const knownProperties = [ + 'aborted', + 'complete', + 'headers', + 'httpVersion', + 'httpVersionMinor', + 'httpVersionMajor', + 'method', + 'rawHeaders', + 'rawTrailers', + 'setTimeout', + 'socket', + 'statusCode', + 'statusMessage', + 'trailers', + 'url' +]; + +module.exports = (fromStream, toStream) => { + if (toStream._readableState.autoDestroy) { + throw new Error('The second stream must have the `autoDestroy` option set to `false`'); + } + + const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); + + const properties = {}; + + for (const property of fromProperties) { + // Don't overwrite existing properties. + if (property in toStream) { + continue; + } + + properties[property] = { + get() { + const value = fromStream[property]; + const isFunction = typeof value === 'function'; + + return isFunction ? 
value.bind(fromStream) : value; + }, + set(value) { + fromStream[property] = value; + }, + enumerable: true, + configurable: false + }; + } + + Object.defineProperties(toStream, properties); + + fromStream.once('aborted', () => { + toStream.destroy(); + + toStream.emit('aborted'); + }); + + fromStream.once('close', () => { + if (fromStream.complete) { + if (toStream.readable) { + toStream.once('end', () => { + toStream.emit('close'); + }); + } else { + toStream.emit('close'); + } + } else { + toStream.emit('close'); + } + }); + + return toStream; +}; + + +/***/ }), + +/***/ 6214: +/***/ ((module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function isTLSSocket(socket) { + return socket.encrypted; +} +const deferToConnect = (socket, fn) => { + let listeners; + if (typeof fn === 'function') { + const connect = fn; + listeners = { connect }; + } + else { + listeners = fn; + } + const hasConnectListener = typeof listeners.connect === 'function'; + const hasSecureConnectListener = typeof listeners.secureConnect === 'function'; + const hasCloseListener = typeof listeners.close === 'function'; + const onConnect = () => { + if (hasConnectListener) { + listeners.connect(); + } + if (isTLSSocket(socket) && hasSecureConnectListener) { + if (socket.authorized) { + listeners.secureConnect(); + } + else if (!socket.authorizationError) { + socket.once('secureConnect', listeners.secureConnect); + } + } + if (hasCloseListener) { + socket.once('close', listeners.close); + } + }; + if (socket.writable && !socket.connecting) { + onConnect(); + } + else if (socket.connecting) { + socket.once('connect', onConnect); + } + else if (socket.destroyed && hasCloseListener) { + listeners.close(socket._hadError); + } +}; +exports["default"] = deferToConnect; +// For CommonJS default export support +module.exports = deferToConnect; +module.exports["default"] = deferToConnect; + + +/***/ }), + +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 1205: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit 
= function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 4460: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var isGlob = __nccwpck_require__(4466); +var pathPosixDirname = (__nccwpck_require__(1017).posix.dirname); +var isWin32 = (__nccwpck_require__(2037).platform)() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; + + +/***/ }), + +/***/ 3664: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const taskManager = __nccwpck_require__(2708); +const patternManager = __nccwpck_require__(8306); +const async_1 = __nccwpck_require__(5679); +const stream_1 = __nccwpck_require__(4630); +const sync_1 = __nccwpck_require__(2405); +const settings_1 = __nccwpck_require__(952); +const utils = __nccwpck_require__(5444); +async function FastGlob(source, options) { + assertPatternsInput(source); + const works = getWorks(source, 
async_1.default, options); + const result = await Promise.all(works); + return utils.array.flatten(result); +} +// https://github.com/typescript-eslint/typescript-eslint/issues/60 +// eslint-disable-next-line no-redeclare +(function (FastGlob) { + function sync(source, options) { + assertPatternsInput(source); + const works = getWorks(source, sync_1.default, options); + return utils.array.flatten(works); + } + FastGlob.sync = sync; + function stream(source, options) { + assertPatternsInput(source); + const works = getWorks(source, stream_1.default, options); + /** + * The stream returned by the provider cannot work with an asynchronous iterator. + * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. + * This affects performance (+25%). I don't see best solution right now. + */ + return utils.stream.merge(works); + } + FastGlob.stream = stream; + function generateTasks(source, options) { + assertPatternsInput(source); + const patterns = patternManager.transform([].concat(source)); + const settings = new settings_1.default(options); + return taskManager.generate(patterns, settings); + } + FastGlob.generateTasks = generateTasks; + function isDynamicPattern(source, options) { + assertPatternsInput(source); + const settings = new settings_1.default(options); + return utils.pattern.isDynamicPattern(source, settings); + } + FastGlob.isDynamicPattern = isDynamicPattern; + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escape(source); + } + FastGlob.escapePath = escapePath; +})(FastGlob || (FastGlob = {})); +function getWorks(source, _Provider, options) { + const patterns = patternManager.transform([].concat(source)); + const settings = new settings_1.default(options); + const tasks = taskManager.generate(patterns, settings); + const provider = new _Provider(settings); + return tasks.map(provider.read, provider); +} +function assertPatternsInput(input) { + const source = [].concat(input); + const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); + if (!isValidSource) { + throw new TypeError('Patterns must be a string (non empty) or an array of strings'); + } +} +module.exports = FastGlob; + + +/***/ }), + +/***/ 8306: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.removeDuplicateSlashes = exports.transform = void 0; +/** + * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. + * The latter is due to the presence of the device path at the beginning of the UNC path. + * @todo rewrite to negative lookbehind with the next major release. + */ +const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; +function transform(patterns) { + return patterns.map((pattern) => removeDuplicateSlashes(pattern)); +} +exports.transform = transform; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
+ */ +function removeDuplicateSlashes(pattern) { + return pattern.replace(DOUBLE_SLASH_RE, '/'); +} +exports.removeDuplicateSlashes = removeDuplicateSlashes; + + +/***/ }), + +/***/ 2708: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; +const utils = __nccwpck_require__(5444); +function generate(patterns, settings) { + const positivePatterns = getPositivePatterns(patterns); + const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore); + const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); + const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); + const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); + const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); + return staticTasks.concat(dynamicTasks); +} +exports.generate = generate; +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. + */ +function convertPatternsToTasks(positive, negative, dynamic) { + const tasks = []; + const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); + const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); + const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); + const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); + tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); + /* + * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory + * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. + */ + if ('.' 
in insideCurrentDirectoryGroup) { + tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); + } + else { + tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); + } + return tasks; +} +exports.convertPatternsToTasks = convertPatternsToTasks; +function getPositivePatterns(patterns) { + return utils.pattern.getPositivePatterns(patterns); +} +exports.getPositivePatterns = getPositivePatterns; +function getNegativePatternsAsPositive(patterns, ignore) { + const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); + const positive = negative.map(utils.pattern.convertToPositivePattern); + return positive; +} +exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; +function groupPatternsByBaseDirectory(patterns) { + const group = {}; + return patterns.reduce((collection, pattern) => { + const base = utils.pattern.getBaseDirectory(pattern); + if (base in collection) { + collection[base].push(pattern); + } + else { + collection[base] = [pattern]; + } + return collection; + }, group); +} +exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; +function convertPatternGroupsToTasks(positive, negative, dynamic) { + return Object.keys(positive).map((base) => { + return convertPatternGroupToTask(base, positive[base], negative, dynamic); + }); +} +exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; +function convertPatternGroupToTask(base, positive, negative, dynamic) { + return { + dynamic, + positive, + negative, + base, + patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) + }; +} +exports.convertPatternGroupToTask = convertPatternGroupToTask; + + +/***/ }), + +/***/ 5679: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const stream_1 = __nccwpck_require__(2083); +const provider_1 = __nccwpck_require__(257); +class ProviderAsync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = []; + return new Promise((resolve, reject) => { + const stream = this.api(root, task, options); + stream.once('error', reject); + stream.on('data', (entry) => entries.push(options.transform(entry))); + stream.once('end', () => resolve(entries)); + }); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderAsync; + + +/***/ }), + +/***/ 6983: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(5444); +const partial_1 = __nccwpck_require__(5295); +class DeepFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + } + getFilter(basePath, positive, negative) { + const matcher = this._getMatcher(positive); + const negativeRe = this._getNegativePatternsRe(negative); + return (entry) => this._filter(basePath, entry, matcher, negativeRe); + } + _getMatcher(patterns) { + return new partial_1.default(patterns, this._settings, this._micromatchOptions); + } + _getNegativePatternsRe(patterns) { + const 
affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); + return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); + } + _filter(basePath, entry, matcher, negativeRe) { + if (this._isSkippedByDeep(basePath, entry.path)) { + return false; + } + if (this._isSkippedSymbolicLink(entry)) { + return false; + } + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._isSkippedByPositivePatterns(filepath, matcher)) { + return false; + } + return this._isSkippedByNegativePatterns(filepath, negativeRe); + } + _isSkippedByDeep(basePath, entryPath) { + /** + * Avoid unnecessary depth calculations when it doesn't matter. + */ + if (this._settings.deep === Infinity) { + return false; + } + return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; + } + _getEntryLevel(basePath, entryPath) { + const entryPathDepth = entryPath.split('/').length; + if (basePath === '') { + return entryPathDepth; + } + const basePathDepth = basePath.split('/').length; + return entryPathDepth - basePathDepth; + } + _isSkippedSymbolicLink(entry) { + return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); + } + _isSkippedByPositivePatterns(entryPath, matcher) { + return !this._settings.baseNameMatch && !matcher.match(entryPath); + } + _isSkippedByNegativePatterns(entryPath, patternsRe) { + return !utils.pattern.matchAny(entryPath, patternsRe); + } +} +exports["default"] = DeepFilter; + + +/***/ }), + +/***/ 1343: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(5444); +class EntryFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this.index = new Map(); + } + getFilter(positive, negative) { + const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); + const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions); + return (entry) => this._filter(entry, positiveRe, negativeRe); + } + _filter(entry, positiveRe, negativeRe) { + if (this._settings.unique && this._isDuplicateEntry(entry)) { + return false; + } + if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { + return false; + } + if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) { + return false; + } + const filepath = this._settings.baseNameMatch ? entry.name : entry.path; + const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe); + if (this._settings.unique && isMatched) { + this._createIndexRecord(entry); + } + return isMatched; + } + _isDuplicateEntry(entry) { + return this.index.has(entry.path); + } + _createIndexRecord(entry) { + this.index.set(entry.path, undefined); + } + _onlyFileFilter(entry) { + return this._settings.onlyFiles && !entry.dirent.isFile(); + } + _onlyDirectoryFilter(entry) { + return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { + if (!this._settings.absolute) { + return false; + } + const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); + return utils.pattern.matchAny(fullpath, patternsRe); + } + /** + * First, just trying to apply patterns to the path. + * Second, trying to apply patterns to the path with final slash. 
+ */ + _isMatchToPatterns(entryPath, patternsRe) { + const filepath = utils.path.removeLeadingDotSegment(entryPath); + return utils.pattern.matchAny(filepath, patternsRe) || utils.pattern.matchAny(filepath + '/', patternsRe); + } +} +exports["default"] = EntryFilter; + + +/***/ }), + +/***/ 6654: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(5444); +class ErrorFilter { + constructor(_settings) { + this._settings = _settings; + } + getFilter() { + return (error) => this._isNonFatalError(error); + } + _isNonFatalError(error) { + return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; + } +} +exports["default"] = ErrorFilter; + + +/***/ }), + +/***/ 2576: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(5444); +class Matcher { + constructor(_patterns, _settings, _micromatchOptions) { + this._patterns = _patterns; + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this._storage = []; + this._fillStorage(); + } + _fillStorage() { + /** + * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level). + * So, before expand patterns with brace expansion into separated patterns. + */ + const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns); + for (const pattern of patterns) { + const segments = this._getPatternSegments(pattern); + const sections = this._splitSegmentsIntoSections(segments); + this._storage.push({ + complete: sections.length <= 1, + pattern, + segments, + sections + }); + } + } + _getPatternSegments(pattern) { + const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); + return parts.map((part) => { + const dynamic = utils.pattern.isDynamicPattern(part, this._settings); + if (!dynamic) { + return { + dynamic: false, + pattern: part + }; + } + return { + dynamic: true, + pattern: part, + patternRe: utils.pattern.makeRe(part, this._micromatchOptions) + }; + }); + } + _splitSegmentsIntoSections(segments) { + return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); + } +} +exports["default"] = Matcher; + + +/***/ }), + +/***/ 5295: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const matcher_1 = __nccwpck_require__(2576); +class PartialMatcher extends matcher_1.default { + match(filepath) { + const parts = filepath.split('/'); + const levels = parts.length; + const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); + for (const pattern of patterns) { + const section = pattern.sections[0]; + /** + * In this case, the pattern has a globstar and we must read all directories unconditionally, + * but only if the level has reached the end of the first group. 
+ * + * fixtures/{a,b}/** + * ^ true/false ^ always true + */ + if (!pattern.complete && levels > section.length) { + return true; + } + const match = parts.every((part, index) => { + const segment = pattern.segments[index]; + if (segment.dynamic && segment.patternRe.test(part)) { + return true; + } + if (!segment.dynamic && segment.pattern === part) { + return true; + } + return false; + }); + if (match) { + return true; + } + } + return false; + } +} +exports["default"] = PartialMatcher; + + +/***/ }), + +/***/ 257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const path = __nccwpck_require__(1017); +const deep_1 = __nccwpck_require__(6983); +const entry_1 = __nccwpck_require__(1343); +const error_1 = __nccwpck_require__(6654); +const entry_2 = __nccwpck_require__(4029); +class Provider { + constructor(_settings) { + this._settings = _settings; + this.errorFilter = new error_1.default(this._settings); + this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); + this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); + this.entryTransformer = new entry_2.default(this._settings); + } + _getRootDirectory(task) { + return path.resolve(this._settings.cwd, task.base); + } + _getReaderOptions(task) { + const basePath = task.base === '.' ? '' : task.base; + return { + basePath, + pathSegmentSeparator: '/', + concurrency: this._settings.concurrency, + deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), + entryFilter: this.entryFilter.getFilter(task.positive, task.negative), + errorFilter: this.errorFilter.getFilter(), + followSymbolicLinks: this._settings.followSymbolicLinks, + fs: this._settings.fs, + stats: this._settings.stats, + throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, + transform: this.entryTransformer.getTransformer() + }; + } + _getMicromatchOptions() { + return { + dot: this._settings.dot, + matchBase: this._settings.baseNameMatch, + nobrace: !this._settings.braceExpansion, + nocase: !this._settings.caseSensitiveMatch, + noext: !this._settings.extglob, + noglobstar: !this._settings.globstar, + posix: true, + strictSlashes: false + }; + } +} +exports["default"] = Provider; + + +/***/ }), + +/***/ 4630: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const stream_1 = __nccwpck_require__(2781); +const stream_2 = __nccwpck_require__(2083); +const provider_1 = __nccwpck_require__(257); +class ProviderStream extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_2.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const source = this.api(root, task, options); + const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); + source + .once('error', (error) => destination.emit('error', error)) + .on('data', (entry) => destination.emit('data', options.transform(entry))) + .once('end', () => destination.emit('end')); + destination + .once('close', () => source.destroy()); + return destination; + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderStream; + + +/***/ }), + +/***/ 
2405: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const sync_1 = __nccwpck_require__(8821); +const provider_1 = __nccwpck_require__(257); +class ProviderSync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new sync_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = this.api(root, task, options); + return entries.map(options.transform); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports["default"] = ProviderSync; + + +/***/ }), + +/***/ 4029: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const utils = __nccwpck_require__(5444); +class EntryTransformer { + constructor(_settings) { + this._settings = _settings; + } + getTransformer() { + return (entry) => this._transform(entry); + } + _transform(entry) { + let filepath = entry.path; + if (this._settings.absolute) { + filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); + filepath = utils.path.unixify(filepath); + } + if (this._settings.markDirectories && entry.dirent.isDirectory()) { + filepath += '/'; + } + if (!this._settings.objectMode) { + return filepath; + } + return Object.assign(Object.assign({}, entry), { path: filepath }); + } +} +exports["default"] = EntryTransformer; + + +/***/ }), + +/***/ 5582: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const path = __nccwpck_require__(1017); +const fsStat = __nccwpck_require__(109); +const utils = __nccwpck_require__(5444); +class Reader { + constructor(_settings) { + this._settings = _settings; + this._fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this._settings.followSymbolicLinks, + fs: this._settings.fs, + throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks + }); + } + _getFullEntryPath(filepath) { + return path.resolve(this._settings.cwd, filepath); + } + _makeEntry(stats, pattern) { + const entry = { + name: pattern, + path: pattern, + dirent: utils.fs.createDirentFromStats(pattern, stats) + }; + if (this._settings.stats) { + entry.stats = stats; + } + return entry; + } + _isFatalError(error) { + return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; + } +} +exports["default"] = Reader; + + +/***/ }), + +/***/ 2083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const stream_1 = __nccwpck_require__(2781); +const fsStat = __nccwpck_require__(109); +const fsWalk = __nccwpck_require__(6026); +const reader_1 = __nccwpck_require__(5582); +class ReaderStream extends reader_1.default { + constructor() { + super(...arguments); + this._walkStream = fsWalk.walkStream; + this._stat = fsStat.stat; + } + dynamic(root, options) { + return this._walkStream(root, options); + } + static(patterns, options) { + const filepaths = patterns.map(this._getFullEntryPath, this); + const stream = new stream_1.PassThrough({ objectMode: true }); + stream._write = (index, _enc, done) => { + return this._getEntry(filepaths[index], patterns[index], options) 
+ .then((entry) => { + if (entry !== null && options.entryFilter(entry)) { + stream.push(entry); + } + if (index === filepaths.length - 1) { + stream.end(); + } + done(); + }) + .catch(done); + }; + for (let i = 0; i < filepaths.length; i++) { + stream.write(i); + } + return stream; + } + _getEntry(filepath, pattern, options) { + return this._getStat(filepath) + .then((stats) => this._makeEntry(stats, pattern)) + .catch((error) => { + if (options.errorFilter(error)) { + return null; + } + throw error; + }); + } + _getStat(filepath) { + return new Promise((resolve, reject) => { + this._stat(filepath, this._fsStatSettings, (error, stats) => { + return error === null ? resolve(stats) : reject(error); + }); + }); + } +} +exports["default"] = ReaderStream; + + +/***/ }), + +/***/ 8821: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const fsStat = __nccwpck_require__(109); +const fsWalk = __nccwpck_require__(6026); +const reader_1 = __nccwpck_require__(5582); +class ReaderSync extends reader_1.default { + constructor() { + super(...arguments); + this._walkSync = fsWalk.walkSync; + this._statSync = fsStat.statSync; + } + dynamic(root, options) { + return this._walkSync(root, options); + } + static(patterns, options) { + const entries = []; + for (const pattern of patterns) { + const filepath = this._getFullEntryPath(pattern); + const entry = this._getEntry(filepath, pattern, options); + if (entry === null || !options.entryFilter(entry)) { + continue; + } + entries.push(entry); + } + return entries; + } + _getEntry(filepath, pattern, options) { + try { + const stats = this._getStat(filepath); + return this._makeEntry(stats, pattern); + } + catch (error) { + if (options.errorFilter(error)) { + return null; + } + throw error; + } + } + _getStat(filepath) { + return this._statSync(filepath, this._fsStatSettings); + } +} +exports["default"] = ReaderSync; + + +/***/ }), + +/***/ 952: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; +const fs = __nccwpck_require__(7147); +const os = __nccwpck_require__(2037); +/** + * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. 
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 + */ +const CPU_COUNT = Math.max(os.cpus().length, 1); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + lstatSync: fs.lstatSync, + stat: fs.stat, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +class Settings { + constructor(_options = {}) { + this._options = _options; + this.absolute = this._getValue(this._options.absolute, false); + this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); + this.braceExpansion = this._getValue(this._options.braceExpansion, true); + this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); + this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); + this.cwd = this._getValue(this._options.cwd, process.cwd()); + this.deep = this._getValue(this._options.deep, Infinity); + this.dot = this._getValue(this._options.dot, false); + this.extglob = this._getValue(this._options.extglob, true); + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); + this.fs = this._getFileSystemMethods(this._options.fs); + this.globstar = this._getValue(this._options.globstar, true); + this.ignore = this._getValue(this._options.ignore, []); + this.markDirectories = this._getValue(this._options.markDirectories, false); + this.objectMode = this._getValue(this._options.objectMode, false); + this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); + this.onlyFiles = this._getValue(this._options.onlyFiles, true); + this.stats = this._getValue(this._options.stats, false); + this.suppressErrors = this._getValue(this._options.suppressErrors, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); + this.unique = this._getValue(this._options.unique, true); + if (this.onlyDirectories) { + this.onlyFiles = false; + } + if (this.stats) { + this.objectMode = true; + } + } + _getValue(option, value) { + return option === undefined ? 
value : option; + } + _getFileSystemMethods(methods = {}) { + return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + } +} +exports["default"] = Settings; + + +/***/ }), + +/***/ 5325: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitWhen = exports.flatten = void 0; +function flatten(items) { + return items.reduce((collection, item) => [].concat(collection, item), []); +} +exports.flatten = flatten; +function splitWhen(items, predicate) { + const result = [[]]; + let groupIndex = 0; + for (const item of items) { + if (predicate(item)) { + groupIndex++; + result[groupIndex] = []; + } + else { + result[groupIndex].push(item); + } + } + return result; +} +exports.splitWhen = splitWhen; + + +/***/ }), + +/***/ 1230: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isEnoentCodeError = void 0; +function isEnoentCodeError(error) { + return error.code === 'ENOENT'; +} +exports.isEnoentCodeError = isEnoentCodeError; + + +/***/ }), + +/***/ 7543: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; + + +/***/ }), + +/***/ 5444: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; +const array = __nccwpck_require__(5325); +exports.array = array; +const errno = __nccwpck_require__(1230); +exports.errno = errno; +const fs = __nccwpck_require__(7543); +exports.fs = fs; +const path = __nccwpck_require__(3873); +exports.path = path; +const pattern = __nccwpck_require__(1221); +exports.pattern = pattern; +const stream = __nccwpck_require__(8382); +exports.stream = stream; +const string = __nccwpck_require__(2203); +exports.string = string; + + +/***/ }), + +/***/ 3873: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0; +const path = __nccwpck_require__(1017); +const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ +const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g; +/** + * Designed to work only with simple paths: `dir\\file`. 
+ */ +function unixify(filepath) { + return filepath.replace(/\\/g, '/'); +} +exports.unixify = unixify; +function makeAbsolute(cwd, filepath) { + return path.resolve(cwd, filepath); +} +exports.makeAbsolute = makeAbsolute; +function escape(pattern) { + return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escape = escape; +function removeLeadingDotSegment(entry) { + // We do not use `startsWith` because this is 10x slower than current implementation for some cases. + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (entry.charAt(0) === '.') { + const secondCharactery = entry.charAt(1); + if (secondCharactery === '/' || secondCharactery === '\\') { + return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); + } + } + return entry; +} +exports.removeLeadingDotSegment = removeLeadingDotSegment; + + +/***/ }), + +/***/ 1221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; +const path = __nccwpck_require__(1017); +const globParent = __nccwpck_require__(4460); +const micromatch = __nccwpck_require__(6228); +const GLOBSTAR = '**'; +const ESCAPE_SYMBOL = '\\'; +const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; +const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; +const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; +const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; +const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; +function isStaticPattern(pattern, options = {}) { + return !isDynamicPattern(pattern, options); +} +exports.isStaticPattern = isStaticPattern; +function isDynamicPattern(pattern, options = {}) { + /** + * A special case with an empty string is necessary for matching patterns that start with a forward slash. + * An empty string cannot be a dynamic pattern. + * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. + */ + if (pattern === '') { + return false; + } + /** + * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check + * filepath directly (without read directory). 
+ */ + if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { + return true; + } + if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { + return true; + } + return false; +} +exports.isDynamicPattern = isDynamicPattern; +function hasBraceExpansion(pattern) { + const openingBraceIndex = pattern.indexOf('{'); + if (openingBraceIndex === -1) { + return false; + } + const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); + if (closingBraceIndex === -1) { + return false; + } + const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); + return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); +} +function convertToPositivePattern(pattern) { + return isNegativePattern(pattern) ? pattern.slice(1) : pattern; +} +exports.convertToPositivePattern = convertToPositivePattern; +function convertToNegativePattern(pattern) { + return '!' + pattern; +} +exports.convertToNegativePattern = convertToNegativePattern; +function isNegativePattern(pattern) { + return pattern.startsWith('!') && pattern[1] !== '('; +} +exports.isNegativePattern = isNegativePattern; +function isPositivePattern(pattern) { + return !isNegativePattern(pattern); +} +exports.isPositivePattern = isPositivePattern; +function getNegativePatterns(patterns) { + return patterns.filter(isNegativePattern); +} +exports.getNegativePatterns = getNegativePatterns; +function getPositivePatterns(patterns) { + return patterns.filter(isPositivePattern); +} +exports.getPositivePatterns = getPositivePatterns; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsInsideCurrentDirectory(patterns) { + return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); +} +exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsOutsideCurrentDirectory(patterns) { + return patterns.filter(isPatternRelatedToParentDirectory); +} +exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; +function isPatternRelatedToParentDirectory(pattern) { + return pattern.startsWith('..') || pattern.startsWith('./..'); +} +exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; +function getBaseDirectory(pattern) { + return globParent(pattern, { flipBackslashes: false }); +} +exports.getBaseDirectory = getBaseDirectory; +function hasGlobStar(pattern) { + return pattern.includes(GLOBSTAR); +} +exports.hasGlobStar = hasGlobStar; +function endsWithSlashGlobStar(pattern) { + return pattern.endsWith('/' + GLOBSTAR); +} +exports.endsWithSlashGlobStar = endsWithSlashGlobStar; +function isAffectDepthOfReadingPattern(pattern) { + const basename = path.basename(pattern); + return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); +} +exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; +function expandPatternsWithBraceExpansion(patterns) { + return patterns.reduce((collection, pattern) => { + return collection.concat(expandBraceExpansion(pattern)); + }, []); +} +exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; +function expandBraceExpansion(pattern) { + return micromatch.braces(pattern, { + expand: true, + nodupes: true + }); +} +exports.expandBraceExpansion = expandBraceExpansion; +function getPatternParts(pattern, options) { + let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); + /** + * The scan method returns an empty array in some cases. + * See micromatch/picomatch#58 for more details. + */ + if (parts.length === 0) { + parts = [pattern]; + } + /** + * The scan method does not return an empty part for the pattern with a forward slash. + * This is another part of micromatch/picomatch#58. 
+ */ + if (parts[0].startsWith('/')) { + parts[0] = parts[0].slice(1); + parts.unshift(''); + } + return parts; +} +exports.getPatternParts = getPatternParts; +function makeRe(pattern, options) { + return micromatch.makeRe(pattern, options); +} +exports.makeRe = makeRe; +function convertPatternsToRe(patterns, options) { + return patterns.map((pattern) => makeRe(pattern, options)); +} +exports.convertPatternsToRe = convertPatternsToRe; +function matchAny(entry, patternsRe) { + return patternsRe.some((patternRe) => patternRe.test(entry)); +} +exports.matchAny = matchAny; + + +/***/ }), + +/***/ 8382: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.merge = void 0; +const merge2 = __nccwpck_require__(2578); +function merge(streams) { + const mergedStream = merge2(streams); + streams.forEach((stream) => { + stream.once('error', (error) => mergedStream.emit('error', error)); + }); + mergedStream.once('close', () => propagateCloseEventToSources(streams)); + mergedStream.once('end', () => propagateCloseEventToSources(streams)); + return mergedStream; +} +exports.merge = merge; +function propagateCloseEventToSources(streams) { + streams.forEach((stream) => stream.emit('close')); +} + + +/***/ }), + +/***/ 2203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isEmpty = exports.isString = void 0; +function isString(input) { + return typeof input === 'string'; +} +exports.isString = isString; +function isEmpty(input) { + return input === ''; +} +exports.isEmpty = isEmpty; + + +/***/ }), + +/***/ 7340: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/* eslint-disable no-var */ + +var reusify = __nccwpck_require__(2113) + +function fastqueue (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } + + if (concurrency < 1) { + throw new Error('fastqueue concurrency must be greater than 1') + } + + var cache = reusify(Task) + var queueHead = null + var queueTail = null + var _running = 0 + var errorHandler = null + + var self = { + push: push, + drain: noop, + saturated: noop, + pause: pause, + paused: false, + concurrency: concurrency, + running: running, + resume: resume, + idle: idle, + length: length, + getQueue: getQueue, + unshift: unshift, + empty: noop, + kill: kill, + killAndDrain: killAndDrain, + error: error + } + + return self + + function running () { + return _running + } + + function pause () { + self.paused = true + } + + function length () { + var current = queueHead + var counter = 0 + + while (current) { + current = current.next + counter++ + } + + return counter + } + + function getQueue () { + var current = queueHead + var tasks = [] + + while (current) { + tasks.push(current.value) + current = current.next + } + + return tasks + } + + function resume () { + if (!self.paused) return + self.paused = false + for (var i = 0; i < self.concurrency; i++) { + _running++ + release() + } + } + + function idle () { + return _running === 0 && self.length() === 0 + } + + function push (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running === self.concurrency || self.paused) { + if (queueTail) { + queueTail.next = 
current + queueTail = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function unshift (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + + if (_running === self.concurrency || self.paused) { + if (queueHead) { + current.next = queueHead + queueHead = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function release (holder) { + if (holder) { + cache.release(holder) + } + var next = queueHead + if (next) { + if (!self.paused) { + if (queueTail === queueHead) { + queueTail = null + } + queueHead = next.next + next.next = null + worker.call(context, next.value, next.worked) + if (queueTail === null) { + self.empty() + } + } else { + _running-- + } + } else if (--_running === 0) { + self.drain() + } + } + + function kill () { + queueHead = null + queueTail = null + self.drain = noop + } + + function killAndDrain () { + queueHead = null + queueTail = null + self.drain() + self.drain = noop + } + + function error (handler) { + errorHandler = handler + } +} + +function noop () {} + +function Task () { + this.value = null + this.callback = noop + this.next = null + this.release = noop + this.context = null + this.errorHandler = null + + var self = this + + this.worked = function worked (err, result) { + var callback = self.callback + var errorHandler = self.errorHandler + var val = self.value + self.value = null + self.callback = noop + if (self.errorHandler) { + errorHandler(err, val) + } + callback.call(self.context, err, result) + self.release(self) + } +} + +function queueAsPromised (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } + + function asyncWrapper (arg, cb) { + worker.call(this, arg) + .then(function (res) { + cb(null, res) + }, cb) + } + + var queue = fastqueue(context, asyncWrapper, concurrency) + + var pushCb = queue.push + var unshiftCb = queue.unshift + + queue.push = push + queue.unshift = unshift + queue.drained = drained + + return queue + + function push (value) { + var p = new Promise(function (resolve, reject) { + pushCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function unshift (value) { + var p = new Promise(function (resolve, reject) { + unshiftCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function drained () { + var previousDrain = queue.drain + + var p = new Promise(function (resolve) { + queue.drain = function () { + previousDrain() + resolve() + } + }) + + return p + } +} + +module.exports = fastqueue +module.exports.promise = queueAsPromised + + +/***/ }), + +/***/ 6330: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -13084,8 +15598,8 @@ exports.flatten = (...args) => { -const util = __nccwpck_require__(1669); -const toRegexRange = __nccwpck_require__(6867); 
+const util = __nccwpck_require__(3837); +const toRegexRange = __nccwpck_require__(1861); const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); @@ -13328,4997 +15842,131 @@ module.exports = fill; /***/ }), -/***/ 2840: -/***/ ((module) => { - -"use strict"; -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ - - - -module.exports = function(num) { - if (typeof num === 'number') { - return num - num === 0; - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); - } - return false; -}; - - -/***/ }), - -/***/ 3913: +/***/ 1585: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); -const util = __nccwpck_require__(1669); -const braces = __nccwpck_require__(5582); -const picomatch = __nccwpck_require__(8569); -const utils = __nccwpck_require__(479); -const isEmptyString = val => typeof val === 'string' && (val === '' || val === './'); +module.exports = options => { + options = {...options}; -/** - * Returns an array of strings that match one or more glob patterns. - * - * ```js - * const mm = require('micromatch'); - * // mm(list, patterns[, options]); - * - * console.log(mm(['a.js', 'a.txt'], ['*.js'])); - * //=> [ 'a.js' ] - * ``` - * @param {String|Array} list List of strings to match. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} options See available [options](#options) - * @return {Array} Returns an array of matches - * @summary false - * @api public - */ + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; -const micromatch = (list, patterns, options) => { - patterns = [].concat(patterns); - list = [].concat(list); + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } - let omit = new Set(); - let keep = new Set(); - let items = new Set(); - let negatives = 0; + if (isBuffer) { + encoding = null; + } - let onResult = state => { - items.add(state.output); - if (options && options.onResult) { - options.onResult(state); - } - }; + const stream = new PassThroughStream({objectMode}); - for (let i = 0; i < patterns.length; i++) { - let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); - let negated = isMatch.state.negated || isMatch.state.negatedExtglob; - if (negated) negatives++; + if (encoding) { + stream.setEncoding(encoding); + } - for (let item of list) { - let matched = isMatch(item, true); + let length = 0; + const chunks = []; - let match = negated ? !matched.isMatch : matched.isMatch; - if (!match) continue; + stream.on('data', chunk => { + chunks.push(chunk); - if (negated) { - omit.add(matched.output); - } else { - omit.delete(matched.output); - keep.add(matched.output); - } - } - } + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); - let result = negatives === patterns.length ? [...items] : [...keep]; - let matches = result.filter(item => !omit.has(item)); + stream.getBufferedValue = () => { + if (array) { + return chunks; + } - if (options && matches.length === 0) { - if (options.failglob === true) { - throw new Error(`No matches found for "${patterns.join(', ')}"`); - } + return isBuffer ? 
Buffer.concat(chunks, length) : chunks.join(''); + }; - if (options.nonull === true || options.nullglob === true) { - return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; - } - } + stream.getBufferedLength = () => length; - return matches; + return stream; }; -/** - * Backwards compatibility - */ - -micromatch.match = micromatch; - -/** - * Returns a matcher function from the given glob `pattern` and `options`. - * The returned function takes a string to match as its only argument and returns - * true if the string is a match. - * - * ```js - * const mm = require('micromatch'); - * // mm.matcher(pattern[, options]); - * - * const isMatch = mm.matcher('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @param {String} `pattern` Glob pattern - * @param {Object} `options` - * @return {Function} Returns a matcher function. - * @api public - */ - -micromatch.matcher = (pattern, options) => picomatch(pattern, options); - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const mm = require('micromatch'); - * // mm.isMatch(string, patterns[, options]); - * - * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(mm.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Backwards compatibility - */ - -micromatch.any = micromatch.isMatch; - -/** - * Returns a list of strings that _**do not match any**_ of the given `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.not(list, patterns[, options]); - * - * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); - * //=> ['b.b', 'c.c'] - * ``` - * @param {Array} `list` Array of strings to match. - * @param {String|Array} `patterns` One or more glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array} Returns an array of strings that **do not match** the given patterns. - * @api public - */ - -micromatch.not = (list, patterns, options = {}) => { - patterns = [].concat(patterns).map(String); - let result = new Set(); - let items = []; - - let onResult = state => { - if (options.onResult) options.onResult(state); - items.push(state.output); - }; - - let matches = micromatch(list, patterns, { ...options, onResult }); - - for (let item of items) { - if (!matches.includes(item)) { - result.add(item); - } - } - return [...result]; -}; - -/** - * Returns true if the given `string` contains the given pattern. Similar - * to [.isMatch](#isMatch) but the pattern can match any part of the string. - * - * ```js - * var mm = require('micromatch'); - * // mm.contains(string, pattern[, options]); - * - * console.log(mm.contains('aa/bb/cc', '*b')); - * //=> true - * console.log(mm.contains('aa/bb/cc', '*d')); - * //=> false - * ``` - * @param {String} `str` The string to match. - * @param {String|Array} `patterns` Glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if the patter matches any part of `str`. 
- * @api public - */ - -micromatch.contains = (str, pattern, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - if (Array.isArray(pattern)) { - return pattern.some(p => micromatch.contains(str, p, options)); - } - - if (typeof pattern === 'string') { - if (isEmptyString(str) || isEmptyString(pattern)) { - return false; - } - - if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { - return true; - } - } - - return micromatch.isMatch(str, pattern, { ...options, contains: true }); -}; - -/** - * Filter the keys of the given object with the given `glob` pattern - * and `options`. Does not attempt to match nested keys. If you need this feature, - * use [glob-object][] instead. - * - * ```js - * const mm = require('micromatch'); - * // mm.matchKeys(object, patterns[, options]); - * - * const obj = { aa: 'a', ab: 'b', ac: 'c' }; - * console.log(mm.matchKeys(obj, '*b')); - * //=> { ab: 'b' } - * ``` - * @param {Object} `object` The object with keys to filter. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Object} Returns an object with only keys that match the given patterns. - * @api public - */ - -micromatch.matchKeys = (obj, patterns, options) => { - if (!utils.isObject(obj)) { - throw new TypeError('Expected the first argument to be an object'); - } - let keys = micromatch(Object.keys(obj), patterns, options); - let res = {}; - for (let key of keys) res[key] = obj[key]; - return res; -}; - -/** - * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.some(list, patterns[, options]); - * - * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // true - * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.some = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (items.some(item => isMatch(item))) { - return true; - } - } - return false; -}; - -/** - * Returns true if every string in the given `list` matches - * any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.every(list, patterns[, options]); - * - * console.log(mm.every('foo.js', ['foo.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // false - * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.every = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (!items.every(item => isMatch(item))) { - return false; - } - } - return true; -}; - -/** - * Returns true if **all** of the given `patterns` match - * the specified string. - * - * ```js - * const mm = require('micromatch'); - * // mm.all(string, patterns[, options]); - * - * console.log(mm.all('foo.js', ['foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); - * // false - * - * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); - * // true - * ``` - * @param {String|Array} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.all = (str, patterns, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - return [].concat(patterns).every(p => picomatch(p, options)(str)); -}; - -/** - * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. - * - * ```js - * const mm = require('micromatch'); - * // mm.capture(pattern, string[, options]); - * - * console.log(mm.capture('test/*.js', 'test/foo.js')); - * //=> ['foo'] - * console.log(mm.capture('test/*.js', 'foo/bar.css')); - * //=> null - * ``` - * @param {String} `glob` Glob pattern to use for matching. - * @param {String} `input` String to match - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`. - * @api public - */ - -micromatch.capture = (glob, input, options) => { - let posix = utils.isWindows(options); - let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); - let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); - - if (match) { - return match.slice(1).map(v => v === void 0 ? '' : v); - } -}; - -/** - * Create a regular expression from the given glob `pattern`. - * - * ```js - * const mm = require('micromatch'); - * // mm.makeRe(pattern[, options]); - * - * console.log(mm.makeRe('*.js')); - * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ - * ``` - * @param {String} `pattern` A glob pattern to convert to regex. - * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -micromatch.makeRe = (...args) => picomatch.makeRe(...args); - -/** - * Scan a glob pattern to separate the pattern into segments. Used - * by the [split](#split) method. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.scan(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -micromatch.scan = (...args) => picomatch.scan(...args); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. 
- * - * ```js - * const mm = require('micromatch'); - * const state = mm(pattern[, options]); - * ``` - * @param {String} `glob` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as regex source string. - * @api public - */ - -micromatch.parse = (patterns, options) => { - let res = []; - for (let pattern of [].concat(patterns || [])) { - for (let str of braces(String(pattern), options)) { - res.push(picomatch.parse(str, options)); - } - } - return res; -}; - -/** - * Process the given brace `pattern`. - * - * ```js - * const { braces } = require('micromatch'); - * console.log(braces('foo/{a,b,c}/bar')); - * //=> [ 'foo/(a|b|c)/bar' ] - * - * console.log(braces('foo/{a,b,c}/bar', { expand: true })); - * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] - * ``` - * @param {String} `pattern` String with brace pattern to process. - * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. - * @return {Array} - * @api public - */ - -micromatch.braces = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { - return [pattern]; - } - return braces(pattern, options); -}; - -/** - * Expand braces - */ - -micromatch.braceExpand = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - return micromatch.braces(pattern, { ...options, expand: true }); -}; - -/** - * Expose micromatch - */ - -module.exports = micromatch; - /***/ }), -/***/ 6867: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. - */ - - - -const isNumber = __nccwpck_require__(2840); - -const toRegexRange = (min, max, options) => { - if (isNumber(min) === false) { - throw new TypeError('toRegexRange: expected the first argument to be a number'); - } - - if (max === void 0 || min === max) { - return String(min); - } - - if (isNumber(max) === false) { - throw new TypeError('toRegexRange: expected the second argument to be a number.'); - } - - let opts = { relaxZeros: true, ...options }; - if (typeof opts.strictZeros === 'boolean') { - opts.relaxZeros = opts.strictZeros === false; - } - - let relax = String(opts.relaxZeros); - let shorthand = String(opts.shorthand); - let capture = String(opts.capture); - let wrap = String(opts.wrap); - let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; - - if (toRegexRange.cache.hasOwnProperty(cacheKey)) { - return toRegexRange.cache[cacheKey].result; - } - - let a = Math.min(min, max); - let b = Math.max(min, max); - - if (Math.abs(a - b) === 1) { - let result = min + '|' + max; - if (opts.capture) { - return `(${result})`; - } - if (opts.wrap === false) { - return result; - } - return `(?:${result})`; - } - - let isPadded = hasPadding(min) || hasPadding(max); - let state = { min, max, a, b }; - let positives = []; - let negatives = []; - - if (isPadded) { - state.isPadded = isPadded; - state.maxLen = String(state.max).length; - } - - if (a < 0) { - let newMin = b < 0 ? 
Math.abs(b) : 1; - negatives = splitToPatterns(newMin, Math.abs(a), state, opts); - a = state.a = 0; - } - - if (b >= 0) { - positives = splitToPatterns(a, b, state, opts); - } - - state.negatives = negatives; - state.positives = positives; - state.result = collatePatterns(negatives, positives, opts); - - if (opts.capture === true) { - state.result = `(${state.result})`; - } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { - state.result = `(?:${state.result})`; - } - - toRegexRange.cache[cacheKey] = state; - return state.result; -}; - -function collatePatterns(neg, pos, options) { - let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; - let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; - let intersected = filterPatterns(neg, pos, '-?', true, options) || []; - let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); - return subpatterns.join('|'); -} - -function splitToRanges(min, max) { - let nines = 1; - let zeros = 1; - - let stop = countNines(min, nines); - let stops = new Set([max]); - - while (min <= stop && stop <= max) { - stops.add(stop); - nines += 1; - stop = countNines(min, nines); - } - - stop = countZeros(max + 1, zeros) - 1; - - while (min < stop && stop <= max) { - stops.add(stop); - zeros += 1; - stop = countZeros(max + 1, zeros) - 1; - } - - stops = [...stops]; - stops.sort(compare); - return stops; -} - -/** - * Convert a range to a regex pattern - * @param {Number} `start` - * @param {Number} `stop` - * @return {String} - */ - -function rangeToPattern(start, stop, options) { - if (start === stop) { - return { pattern: start, count: [], digits: 0 }; - } - - let zipped = zip(start, stop); - let digits = zipped.length; - let pattern = ''; - let count = 0; - - for (let i = 0; i < digits; i++) { - let [startDigit, stopDigit] = zipped[i]; - - if (startDigit === stopDigit) { - pattern += startDigit; - - } else if (startDigit !== '0' || stopDigit !== '9') { - pattern += toCharacterClass(startDigit, stopDigit, options); - - } else { - count++; - } - } - - if (count) { - pattern += options.shorthand === true ? '\\d' : '[0-9]'; - } - - return { pattern, count: [count], digits }; -} - -function splitToPatterns(min, max, tok, options) { - let ranges = splitToRanges(min, max); - let tokens = []; - let start = min; - let prev; - - for (let i = 0; i < ranges.length; i++) { - let max = ranges[i]; - let obj = rangeToPattern(String(start), String(max), options); - let zeros = ''; - - if (!tok.isPadded && prev && prev.pattern === obj.pattern) { - if (prev.count.length > 1) { - prev.count.pop(); - } - - prev.count.push(obj.count[0]); - prev.string = prev.pattern + toQuantifier(prev.count); - start = max + 1; - continue; - } - - if (tok.isPadded) { - zeros = padZeros(max, tok, options); - } - - obj.string = zeros + obj.pattern + toQuantifier(obj.count); - tokens.push(obj); - start = max + 1; - prev = obj; - } - - return tokens; -} - -function filterPatterns(arr, comparison, prefix, intersection, options) { - let result = []; - - for (let ele of arr) { - let { string } = ele; - - // only push if _both_ are negative... 
- if (!intersection && !contains(comparison, 'string', string)) { - result.push(prefix + string); - } - - // or _both_ are positive - if (intersection && contains(comparison, 'string', string)) { - result.push(prefix + string); - } - } - return result; -} - -/** - * Zip strings - */ - -function zip(a, b) { - let arr = []; - for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); - return arr; -} - -function compare(a, b) { - return a > b ? 1 : b > a ? -1 : 0; -} - -function contains(arr, key, val) { - return arr.some(ele => ele[key] === val); -} - -function countNines(min, len) { - return Number(String(min).slice(0, -len) + '9'.repeat(len)); -} - -function countZeros(integer, zeros) { - return integer - (integer % Math.pow(10, zeros)); -} - -function toQuantifier(digits) { - let [start = 0, stop = ''] = digits; - if (stop || start > 1) { - return `{${start + (stop ? ',' + stop : '')}}`; - } - return ''; -} - -function toCharacterClass(a, b, options) { - return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; -} - -function hasPadding(str) { - return /^-?(0+)\d/.test(str); -} - -function padZeros(value, tok, options) { - if (!tok.isPadded) { - return value; - } - - let diff = Math.abs(tok.maxLen - String(value).length); - let relax = options.relaxZeros !== false; - - switch (diff) { - case 0: - return ''; - case 1: - return relax ? '0?' : '0'; - case 2: - return relax ? '0{0,2}' : '00'; - default: { - return relax ? `0{0,${diff}}` : `0{${diff}}`; - } - } -} - -/** - * Cache - */ - -toRegexRange.cache = {}; -toRegexRange.clearCache = () => (toRegexRange.cache = {}); - -/** - * Expose `toRegexRange` - */ - -module.exports = toRegexRange; - - -/***/ }), - -/***/ 3664: +/***/ 1766: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const taskManager = __nccwpck_require__(2708); -const async_1 = __nccwpck_require__(5679); -const stream_1 = __nccwpck_require__(4630); -const sync_1 = __nccwpck_require__(2405); -const settings_1 = __nccwpck_require__(952); -const utils = __nccwpck_require__(5444); -async function FastGlob(source, options) { - assertPatternsInput(source); - const works = getWorks(source, async_1.default, options); - const result = await Promise.all(works); - return utils.array.flatten(result); -} -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -(function (FastGlob) { - function sync(source, options) { - assertPatternsInput(source); - const works = getWorks(source, sync_1.default, options); - return utils.array.flatten(works); - } - FastGlob.sync = sync; - function stream(source, options) { - assertPatternsInput(source); - const works = getWorks(source, stream_1.default, options); - /** - * The stream returned by the provider cannot work with an asynchronous iterator. - * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. - * This affects performance (+25%). I don't see best solution right now. 
- */ - return utils.stream.merge(works); - } - FastGlob.stream = stream; - function generateTasks(source, options) { - assertPatternsInput(source); - const patterns = [].concat(source); - const settings = new settings_1.default(options); - return taskManager.generate(patterns, settings); - } - FastGlob.generateTasks = generateTasks; - function isDynamicPattern(source, options) { - assertPatternsInput(source); - const settings = new settings_1.default(options); - return utils.pattern.isDynamicPattern(source, settings); - } - FastGlob.isDynamicPattern = isDynamicPattern; - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escape(source); - } - FastGlob.escapePath = escapePath; -})(FastGlob || (FastGlob = {})); -function getWorks(source, _Provider, options) { - const patterns = [].concat(source); - const settings = new settings_1.default(options); - const tasks = taskManager.generate(patterns, settings); - const provider = new _Provider(settings); - return tasks.map(provider.read, provider); -} -function assertPatternsInput(input) { - const source = [].concat(input); - const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); - if (!isValidSource) { - throw new TypeError('Patterns must be a string (non empty) or an array of strings'); - } -} -module.exports = FastGlob; - - -/***/ }), - -/***/ 2708: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = __nccwpck_require__(5444); -function generate(patterns, settings) { - const positivePatterns = getPositivePatterns(patterns); - const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore); - const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); - const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); - const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); - const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); - return staticTasks.concat(dynamicTasks); -} -exports.generate = generate; -function convertPatternsToTasks(positive, negative, dynamic) { - const positivePatternsGroup = groupPatternsByBaseDirectory(positive); - // When we have a global group – there is no reason to divide the patterns into independent tasks. - // In this case, the global task covers the rest. - if ('.' 
in positivePatternsGroup) { - const task = convertPatternGroupToTask('.', positive, negative, dynamic); - return [task]; - } - return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic); -} -exports.convertPatternsToTasks = convertPatternsToTasks; -function getPositivePatterns(patterns) { - return utils.pattern.getPositivePatterns(patterns); -} -exports.getPositivePatterns = getPositivePatterns; -function getNegativePatternsAsPositive(patterns, ignore) { - const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); - const positive = negative.map(utils.pattern.convertToPositivePattern); - return positive; -} -exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; -function groupPatternsByBaseDirectory(patterns) { - const group = {}; - return patterns.reduce((collection, pattern) => { - const base = utils.pattern.getBaseDirectory(pattern); - if (base in collection) { - collection[base].push(pattern); - } - else { - collection[base] = [pattern]; - } - return collection; - }, group); -} -exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; -function convertPatternGroupsToTasks(positive, negative, dynamic) { - return Object.keys(positive).map((base) => { - return convertPatternGroupToTask(base, positive[base], negative, dynamic); - }); -} -exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; -function convertPatternGroupToTask(base, positive, negative, dynamic) { - return { - dynamic, - positive, - negative, - base, - patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) - }; -} -exports.convertPatternGroupToTask = convertPatternGroupToTask; - - -/***/ }), - -/***/ 5679: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2083); -const provider_1 = __nccwpck_require__(257); -class ProviderAsync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = []; - return new Promise((resolve, reject) => { - const stream = this.api(root, task, options); - stream.once('error', reject); - stream.on('data', (entry) => entries.push(options.transform(entry))); - stream.once('end', () => resolve(entries)); - }); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderAsync; - - -/***/ }), - -/***/ 6983: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -const partial_1 = __nccwpck_require__(5295); -class DeepFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - } - getFilter(basePath, positive, negative) { - const matcher = this._getMatcher(positive); - const negativeRe = this._getNegativePatternsRe(negative); - return (entry) => this._filter(basePath, entry, matcher, negativeRe); - } - _getMatcher(patterns) { - return new partial_1.default(patterns, this._settings, this._micromatchOptions); - } - _getNegativePatternsRe(patterns) { - const affectDepthOfReadingPatterns = 
patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); - return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); - } - _filter(basePath, entry, matcher, negativeRe) { - if (this._isSkippedByDeep(basePath, entry.path)) { - return false; - } - if (this._isSkippedSymbolicLink(entry)) { - return false; - } - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._isSkippedByPositivePatterns(filepath, matcher)) { - return false; - } - return this._isSkippedByNegativePatterns(filepath, negativeRe); - } - _isSkippedByDeep(basePath, entryPath) { - /** - * Avoid unnecessary depth calculations when it doesn't matter. - */ - if (this._settings.deep === Infinity) { - return false; - } - return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; - } - _getEntryLevel(basePath, entryPath) { - const entryPathDepth = entryPath.split('/').length; - if (basePath === '') { - return entryPathDepth; - } - const basePathDepth = basePath.split('/').length; - return entryPathDepth - basePathDepth; - } - _isSkippedSymbolicLink(entry) { - return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); - } - _isSkippedByPositivePatterns(entryPath, matcher) { - return !this._settings.baseNameMatch && !matcher.match(entryPath); - } - _isSkippedByNegativePatterns(entryPath, patternsRe) { - return !utils.pattern.matchAny(entryPath, patternsRe); - } -} -exports.default = DeepFilter; - - -/***/ }), - -/***/ 1343: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class EntryFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this.index = new Map(); - } - getFilter(positive, negative) { - const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); - const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions); - return (entry) => this._filter(entry, positiveRe, negativeRe); - } - _filter(entry, positiveRe, negativeRe) { - if (this._settings.unique && this._isDuplicateEntry(entry)) { - return false; - } - if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { - return false; - } - if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) { - return false; - } - const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path; - const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe); - if (this._settings.unique && isMatched) { - this._createIndexRecord(entry); - } - return isMatched; - } - _isDuplicateEntry(entry) { - return this.index.has(entry.path); - } - _createIndexRecord(entry) { - this.index.set(entry.path, undefined); - } - _onlyFileFilter(entry) { - return this._settings.onlyFiles && !entry.dirent.isFile(); - } - _onlyDirectoryFilter(entry) { - return this._settings.onlyDirectories && !entry.dirent.isDirectory(); - } - _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { - if (!this._settings.absolute) { - return false; - } - const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); - return utils.pattern.matchAny(fullpath, patternsRe); - } - _isMatchToPatterns(entryPath, patternsRe) { - const filepath = utils.path.removeLeadingDotSegment(entryPath); - return utils.pattern.matchAny(filepath, patternsRe); - } -} -exports.default = EntryFilter; - - -/***/ }), - -/***/ 6654: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class ErrorFilter { - constructor(_settings) { - this._settings = _settings; - } - getFilter() { - return (error) => this._isNonFatalError(error); - } - _isNonFatalError(error) { - return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; - } -} -exports.default = ErrorFilter; - - -/***/ }), - -/***/ 2576: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class Matcher { - constructor(_patterns, _settings, _micromatchOptions) { - this._patterns = _patterns; - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this._storage = []; - this._fillStorage(); - } - _fillStorage() { - /** - * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level). - * So, before expand patterns with brace expansion into separated patterns. 
- */ - const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns); - for (const pattern of patterns) { - const segments = this._getPatternSegments(pattern); - const sections = this._splitSegmentsIntoSections(segments); - this._storage.push({ - complete: sections.length <= 1, - pattern, - segments, - sections - }); - } - } - _getPatternSegments(pattern) { - const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); - return parts.map((part) => { - const dynamic = utils.pattern.isDynamicPattern(part, this._settings); - if (!dynamic) { - return { - dynamic: false, - pattern: part - }; - } - return { - dynamic: true, - pattern: part, - patternRe: utils.pattern.makeRe(part, this._micromatchOptions) - }; - }); - } - _splitSegmentsIntoSections(segments) { - return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); - } -} -exports.default = Matcher; - - -/***/ }), - -/***/ 5295: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const matcher_1 = __nccwpck_require__(2576); -class PartialMatcher extends matcher_1.default { - match(filepath) { - const parts = filepath.split('/'); - const levels = parts.length; - const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); - for (const pattern of patterns) { - const section = pattern.sections[0]; - /** - * In this case, the pattern has a globstar and we must read all directories unconditionally, - * but only if the level has reached the end of the first group. - * - * fixtures/{a,b}/** - * ^ true/false ^ always true - */ - if (!pattern.complete && levels > section.length) { - return true; - } - const match = parts.every((part, index) => { - const segment = pattern.segments[index]; - if (segment.dynamic && segment.patternRe.test(part)) { - return true; - } - if (!segment.dynamic && segment.pattern === part) { - return true; - } - return false; - }); - if (match) { - return true; - } - } - return false; - } -} -exports.default = PartialMatcher; - - -/***/ }), - -/***/ 257: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); -const deep_1 = __nccwpck_require__(6983); -const entry_1 = __nccwpck_require__(1343); -const error_1 = __nccwpck_require__(6654); -const entry_2 = __nccwpck_require__(4029); -class Provider { - constructor(_settings) { - this._settings = _settings; - this.errorFilter = new error_1.default(this._settings); - this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); - this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); - this.entryTransformer = new entry_2.default(this._settings); - } - _getRootDirectory(task) { - return path.resolve(this._settings.cwd, task.base); - } - _getReaderOptions(task) { - const basePath = task.base === '.' ? 
'' : task.base; - return { - basePath, - pathSegmentSeparator: '/', - concurrency: this._settings.concurrency, - deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), - entryFilter: this.entryFilter.getFilter(task.positive, task.negative), - errorFilter: this.errorFilter.getFilter(), - followSymbolicLinks: this._settings.followSymbolicLinks, - fs: this._settings.fs, - stats: this._settings.stats, - throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, - transform: this.entryTransformer.getTransformer() - }; - } - _getMicromatchOptions() { - return { - dot: this._settings.dot, - matchBase: this._settings.baseNameMatch, - nobrace: !this._settings.braceExpansion, - nocase: !this._settings.caseSensitiveMatch, - noext: !this._settings.extglob, - noglobstar: !this._settings.globstar, - posix: true, - strictSlashes: false - }; - } -} -exports.default = Provider; - - -/***/ }), - -/***/ 4630: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); -const stream_2 = __nccwpck_require__(2083); -const provider_1 = __nccwpck_require__(257); -class ProviderStream extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_2.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const source = this.api(root, task, options); - const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); - source - .once('error', (error) => destination.emit('error', error)) - .on('data', (entry) => destination.emit('data', options.transform(entry))) - .once('end', () => destination.emit('end')); - destination - .once('close', () => source.destroy()); - return destination; - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderStream; - - -/***/ }), - -/***/ 2405: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const sync_1 = __nccwpck_require__(8821); -const provider_1 = __nccwpck_require__(257); -class ProviderSync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new sync_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = this.api(root, task, options); - return entries.map(options.transform); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderSync; - - -/***/ }), - -/***/ 4029: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const utils = __nccwpck_require__(5444); -class EntryTransformer { - constructor(_settings) { - this._settings = _settings; - } - getTransformer() { - return (entry) => this._transform(entry); - } - _transform(entry) { - let filepath = entry.path; - if (this._settings.absolute) { - filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); - filepath = utils.path.unixify(filepath); - } - if (this._settings.markDirectories && 
entry.dirent.isDirectory()) { - filepath += '/'; - } - if (!this._settings.objectMode) { - return filepath; - } - return Object.assign(Object.assign({}, entry), { path: filepath }); - } -} -exports.default = EntryTransformer; - - -/***/ }), - -/***/ 8062: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const path = __nccwpck_require__(5622); -const fsStat = __nccwpck_require__(109); -const utils = __nccwpck_require__(5444); -class Reader { - constructor(_settings) { - this._settings = _settings; - this._fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this._settings.followSymbolicLinks, - fs: this._settings.fs, - throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks - }); - } - _getFullEntryPath(filepath) { - return path.resolve(this._settings.cwd, filepath); - } - _makeEntry(stats, pattern) { - const entry = { - name: pattern, - path: pattern, - dirent: utils.fs.createDirentFromStats(pattern, stats) - }; - if (this._settings.stats) { - entry.stats = stats; - } - return entry; - } - _isFatalError(error) { - return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; - } -} -exports.default = Reader; - - -/***/ }), - -/***/ 2083: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const stream_1 = __nccwpck_require__(2413); -const fsStat = __nccwpck_require__(109); -const fsWalk = __nccwpck_require__(6026); -const reader_1 = __nccwpck_require__(8062); -class ReaderStream extends reader_1.default { - constructor() { - super(...arguments); - this._walkStream = fsWalk.walkStream; - this._stat = fsStat.stat; - } - dynamic(root, options) { - return this._walkStream(root, options); - } - static(patterns, options) { - const filepaths = patterns.map(this._getFullEntryPath, this); - const stream = new stream_1.PassThrough({ objectMode: true }); - stream._write = (index, _enc, done) => { - return this._getEntry(filepaths[index], patterns[index], options) - .then((entry) => { - if (entry !== null && options.entryFilter(entry)) { - stream.push(entry); - } - if (index === filepaths.length - 1) { - stream.end(); - } - done(); - }) - .catch(done); - }; - for (let i = 0; i < filepaths.length; i++) { - stream.write(i); - } - return stream; - } - _getEntry(filepath, pattern, options) { - return this._getStat(filepath) - .then((stats) => this._makeEntry(stats, pattern)) - .catch((error) => { - if (options.errorFilter(error)) { - return null; - } - throw error; - }); - } - _getStat(filepath) { - return new Promise((resolve, reject) => { - this._stat(filepath, this._fsStatSettings, (error, stats) => { - return error === null ? 
resolve(stats) : reject(error); - }); - }); - } -} -exports.default = ReaderStream; - - -/***/ }), - -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fsStat = __nccwpck_require__(109); -const fsWalk = __nccwpck_require__(6026); -const reader_1 = __nccwpck_require__(8062); -class ReaderSync extends reader_1.default { - constructor() { - super(...arguments); - this._walkSync = fsWalk.walkSync; - this._statSync = fsStat.statSync; - } - dynamic(root, options) { - return this._walkSync(root, options); - } - static(patterns, options) { - const entries = []; - for (const pattern of patterns) { - const filepath = this._getFullEntryPath(pattern); - const entry = this._getEntry(filepath, pattern, options); - if (entry === null || !options.entryFilter(entry)) { - continue; - } - entries.push(entry); - } - return entries; - } - _getEntry(filepath, pattern, options) { - try { - const stats = this._getStat(filepath); - return this._makeEntry(stats, pattern); - } - catch (error) { - if (options.errorFilter(error)) { - return null; - } - throw error; - } - } - _getStat(filepath) { - return this._statSync(filepath, this._fsStatSettings); - } -} -exports.default = ReaderSync; - - -/***/ }), - -/***/ 952: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; -const fs = __nccwpck_require__(5747); -const os = __nccwpck_require__(2087); -/** - * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. - * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 - */ -const CPU_COUNT = Math.max(os.cpus().length, 1); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - lstatSync: fs.lstatSync, - stat: fs.stat, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -class Settings { - constructor(_options = {}) { - this._options = _options; - this.absolute = this._getValue(this._options.absolute, false); - this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); - this.braceExpansion = this._getValue(this._options.braceExpansion, true); - this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); - this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); - this.cwd = this._getValue(this._options.cwd, process.cwd()); - this.deep = this._getValue(this._options.deep, Infinity); - this.dot = this._getValue(this._options.dot, false); - this.extglob = this._getValue(this._options.extglob, true); - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); - this.fs = this._getFileSystemMethods(this._options.fs); - this.globstar = this._getValue(this._options.globstar, true); - this.ignore = this._getValue(this._options.ignore, []); - this.markDirectories = this._getValue(this._options.markDirectories, false); - this.objectMode = this._getValue(this._options.objectMode, false); - this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); - this.onlyFiles = this._getValue(this._options.onlyFiles, true); - this.stats = this._getValue(this._options.stats, false); - this.suppressErrors = this._getValue(this._options.suppressErrors, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); - 
this.unique = this._getValue(this._options.unique, true); - if (this.onlyDirectories) { - this.onlyFiles = false; - } - if (this.stats) { - this.objectMode = true; - } - } - _getValue(option, value) { - return option === undefined ? value : option; - } - _getFileSystemMethods(methods = {}) { - return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); - } -} -exports.default = Settings; - - -/***/ }), - -/***/ 5325: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.splitWhen = exports.flatten = void 0; -function flatten(items) { - return items.reduce((collection, item) => [].concat(collection, item), []); -} -exports.flatten = flatten; -function splitWhen(items, predicate) { - const result = [[]]; - let groupIndex = 0; - for (const item of items) { - if (predicate(item)) { - groupIndex++; - result[groupIndex] = []; - } - else { - result[groupIndex].push(item); - } - } - return result; -} -exports.splitWhen = splitWhen; - - -/***/ }), - -/***/ 1230: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isEnoentCodeError = void 0; -function isEnoentCodeError(error) { - return error.code === 'ENOENT'; -} -exports.isEnoentCodeError = isEnoentCodeError; - - -/***/ }), - -/***/ 7543: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; - - -/***/ }), - -/***/ 5444: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; -const array = __nccwpck_require__(5325); -exports.array = array; -const errno = __nccwpck_require__(1230); -exports.errno = errno; -const fs = __nccwpck_require__(7543); -exports.fs = fs; -const path = __nccwpck_require__(3873); -exports.path = path; -const pattern = __nccwpck_require__(1221); -exports.pattern = pattern; -const stream = __nccwpck_require__(8382); -exports.stream = stream; -const string = __nccwpck_require__(2203); -exports.string = string; - - -/***/ }), - -/***/ 3873: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0; -const path = __nccwpck_require__(5622); -const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ -const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g; -/** - * Designed to work only with simple paths: `dir\\file`. 
- */ -function unixify(filepath) { - return filepath.replace(/\\/g, '/'); -} -exports.unixify = unixify; -function makeAbsolute(cwd, filepath) { - return path.resolve(cwd, filepath); -} -exports.makeAbsolute = makeAbsolute; -function escape(pattern) { - return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escape = escape; -function removeLeadingDotSegment(entry) { - // We do not use `startsWith` because this is 10x slower than current implementation for some cases. - // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with - if (entry.charAt(0) === '.') { - const secondCharactery = entry.charAt(1); - if (secondCharactery === '/' || secondCharactery === '\\') { - return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); - } - } - return entry; -} -exports.removeLeadingDotSegment = removeLeadingDotSegment; - - -/***/ }), - -/***/ 1221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; -const path = __nccwpck_require__(5622); -const globParent = __nccwpck_require__(4655); -const micromatch = __nccwpck_require__(3913); -const picomatch = __nccwpck_require__(8569); -const GLOBSTAR = '**'; -const ESCAPE_SYMBOL = '\\'; -const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; -const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/; -const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/; -const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/; -const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/; -function isStaticPattern(pattern, options = {}) { - return !isDynamicPattern(pattern, options); -} -exports.isStaticPattern = isStaticPattern; -function isDynamicPattern(pattern, options = {}) { - /** - * A special case with an empty string is necessary for matching patterns that start with a forward slash. - * An empty string cannot be a dynamic pattern. - * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. - */ - if (pattern === '') { - return false; - } - /** - * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check - * filepath directly (without read directory). - */ - if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { - return true; - } - if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) { - return true; - } - return false; -} -exports.isDynamicPattern = isDynamicPattern; -function convertToPositivePattern(pattern) { - return isNegativePattern(pattern) ? pattern.slice(1) : pattern; -} -exports.convertToPositivePattern = convertToPositivePattern; -function convertToNegativePattern(pattern) { - return '!' 
+ pattern; -} -exports.convertToNegativePattern = convertToNegativePattern; -function isNegativePattern(pattern) { - return pattern.startsWith('!') && pattern[1] !== '('; -} -exports.isNegativePattern = isNegativePattern; -function isPositivePattern(pattern) { - return !isNegativePattern(pattern); -} -exports.isPositivePattern = isPositivePattern; -function getNegativePatterns(patterns) { - return patterns.filter(isNegativePattern); -} -exports.getNegativePatterns = getNegativePatterns; -function getPositivePatterns(patterns) { - return patterns.filter(isPositivePattern); -} -exports.getPositivePatterns = getPositivePatterns; -function getBaseDirectory(pattern) { - return globParent(pattern, { flipBackslashes: false }); -} -exports.getBaseDirectory = getBaseDirectory; -function hasGlobStar(pattern) { - return pattern.includes(GLOBSTAR); -} -exports.hasGlobStar = hasGlobStar; -function endsWithSlashGlobStar(pattern) { - return pattern.endsWith('/' + GLOBSTAR); -} -exports.endsWithSlashGlobStar = endsWithSlashGlobStar; -function isAffectDepthOfReadingPattern(pattern) { - const basename = path.basename(pattern); - return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); -} -exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; -function expandPatternsWithBraceExpansion(patterns) { - return patterns.reduce((collection, pattern) => { - return collection.concat(expandBraceExpansion(pattern)); - }, []); -} -exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; -function expandBraceExpansion(pattern) { - return micromatch.braces(pattern, { - expand: true, - nodupes: true - }); -} -exports.expandBraceExpansion = expandBraceExpansion; -function getPatternParts(pattern, options) { - let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); - /** - * The scan method returns an empty array in some cases. - * See micromatch/picomatch#58 for more details. - */ - if (parts.length === 0) { - parts = [pattern]; - } - /** - * The scan method does not return an empty part for the pattern with a forward slash. - * This is another part of micromatch/picomatch#58. 
- */ - if (parts[0].startsWith('/')) { - parts[0] = parts[0].slice(1); - parts.unshift(''); - } - return parts; -} -exports.getPatternParts = getPatternParts; -function makeRe(pattern, options) { - return micromatch.makeRe(pattern, options); -} -exports.makeRe = makeRe; -function convertPatternsToRe(patterns, options) { - return patterns.map((pattern) => makeRe(pattern, options)); -} -exports.convertPatternsToRe = convertPatternsToRe; -function matchAny(entry, patternsRe) { - return patternsRe.some((patternRe) => patternRe.test(entry)); -} -exports.matchAny = matchAny; - - -/***/ }), - -/***/ 8382: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.merge = void 0; -const merge2 = __nccwpck_require__(2578); -function merge(streams) { - const mergedStream = merge2(streams); - streams.forEach((stream) => { - stream.once('error', (error) => mergedStream.emit('error', error)); - }); - mergedStream.once('close', () => propagateCloseEventToSources(streams)); - mergedStream.once('end', () => propagateCloseEventToSources(streams)); - return mergedStream; -} -exports.merge = merge; -function propagateCloseEventToSources(streams) { - streams.forEach((stream) => stream.emit('close')); -} - - -/***/ }), - -/***/ 2203: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isEmpty = exports.isString = void 0; -function isString(input) { - return typeof input === 'string'; -} -exports.isString = isString; -function isEmpty(input) { - return input === ''; -} -exports.isEmpty = isEmpty; - - -/***/ }), - -/***/ 7340: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var reusify = __nccwpck_require__(2113) - -function fastqueue (context, worker, concurrency) { - if (typeof context === 'function') { - concurrency = worker - worker = context - context = null - } - - if (concurrency < 1) { - throw new Error('fastqueue concurrency must be greater than 1') - } - - var cache = reusify(Task) - var queueHead = null - var queueTail = null - var _running = 0 - var errorHandler = null - - var self = { - push: push, - drain: noop, - saturated: noop, - pause: pause, - paused: false, - concurrency: concurrency, - running: running, - resume: resume, - idle: idle, - length: length, - getQueue: getQueue, - unshift: unshift, - empty: noop, - kill: kill, - killAndDrain: killAndDrain, - error: error - } - - return self - - function running () { - return _running - } - - function pause () { - self.paused = true - } - - function length () { - var current = queueHead - var counter = 0 - - while (current) { - current = current.next - counter++ - } - - return counter - } - - function getQueue () { - var current = queueHead - var tasks = [] - - while (current) { - tasks.push(current.value) - current = current.next - } - - return tasks - } - - function resume () { - if (!self.paused) return - self.paused = false - for (var i = 0; i < self.concurrency; i++) { - _running++ - release() - } - } - - function idle () { - return _running === 0 && self.length() === 0 - } - - function push (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running === self.concurrency || self.paused) { - if (queueTail) { - queueTail.next = current - queueTail = current - 
} else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function unshift (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - - if (_running === self.concurrency || self.paused) { - if (queueHead) { - current.next = queueHead - queueHead = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function release (holder) { - if (holder) { - cache.release(holder) - } - var next = queueHead - if (next) { - if (!self.paused) { - if (queueTail === queueHead) { - queueTail = null - } - queueHead = next.next - next.next = null - worker.call(context, next.value, next.worked) - if (queueTail === null) { - self.empty() - } - } else { - _running-- - } - } else if (--_running === 0) { - self.drain() - } - } - - function kill () { - queueHead = null - queueTail = null - self.drain = noop - } - - function killAndDrain () { - queueHead = null - queueTail = null - self.drain() - self.drain = noop - } - - function error (handler) { - errorHandler = handler - } -} - -function noop () {} - -function Task () { - this.value = null - this.callback = noop - this.next = null - this.release = noop - this.context = null - this.errorHandler = null - - var self = this - - this.worked = function worked (err, result) { - var callback = self.callback - var errorHandler = self.errorHandler - var val = self.value - self.value = null - self.callback = noop - if (self.errorHandler) { - errorHandler(err, val) - } - callback.call(self.context, err, result) - self.release(self) - } -} - -module.exports = fastqueue - - -/***/ }), - -/***/ 4655: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var isGlob = __nccwpck_require__(4466); -var pathPosixDirname = __nccwpck_require__(5622).posix.dirname; -var isWin32 = __nccwpck_require__(2087).platform() === 'win32'; - -var slash = '/'; -var backslash = /\\/g; -var enclosure = /[\{\[].*[\/]*.*[\}\]]$/; -var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; -var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - -/** - * @param {string} str - * @param {Object} opts - * @param {boolean} [opts.flipBackslashes=true] - */ -module.exports = function globParent(str, opts) { - var options = Object.assign({ flipBackslashes: true }, opts); - - // flip windows path separators - if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { - str = str.replace(backslash, slash); - } - - // special case for strings ending in enclosure containing path separator - if (enclosure.test(str)) { - str += slash; - } - - // preserves full path in case of trailing path separator - str += 'a'; - - // remove path parts that are globby - do { - str = pathPosixDirname(str); - } while (isGlob(str) || globby.test(str)); - - // remove escape chars and return result - return str.replace(escaped, '$1'); -}; - - -/***/ }), - -/***/ 6457: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const types_1 = __nccwpck_require__(4597); -function createRejection(error, ...beforeErrorGroups) { - const promise = (async () => { - if (error instanceof types_1.RequestError) { - try { - for (const hooks of beforeErrorGroups) { - if (hooks) { - for (const hook of 
hooks) { - // eslint-disable-next-line no-await-in-loop - error = await hook(error); - } - } - } - } - catch (error_) { - error = error_; - } - } - throw error; - })(); - const returnPromise = () => promise; - promise.json = returnPromise; - promise.text = returnPromise; - promise.buffer = returnPromise; - promise.on = returnPromise; - return promise; -} -exports.default = createRejection; - - -/***/ }), - -/***/ 6056: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const events_1 = __nccwpck_require__(8614); -const is_1 = __nccwpck_require__(7678); -const PCancelable = __nccwpck_require__(9072); -const types_1 = __nccwpck_require__(4597); -const parse_body_1 = __nccwpck_require__(8220); -const core_1 = __nccwpck_require__(94); -const proxy_events_1 = __nccwpck_require__(3021); -const get_buffer_1 = __nccwpck_require__(4500); -const is_response_ok_1 = __nccwpck_require__(9298); -const proxiedRequestEvents = [ - 'request', - 'response', - 'redirect', - 'uploadProgress', - 'downloadProgress' -]; -function asPromise(normalizedOptions) { - let globalRequest; - let globalResponse; - const emitter = new events_1.EventEmitter(); - const promise = new PCancelable((resolve, reject, onCancel) => { - const makeRequest = (retryCount) => { - const request = new core_1.default(undefined, normalizedOptions); - request.retryCount = retryCount; - request._noPipe = true; - onCancel(() => request.destroy()); - onCancel.shouldReject = false; - onCancel(() => reject(new types_1.CancelError(request))); - globalRequest = request; - request.once('response', async (response) => { - var _a; - response.retryCount = retryCount; - if (response.request.aborted) { - // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error - return; - } - // Download body - let rawBody; - try { - rawBody = await get_buffer_1.default(request); - response.rawBody = rawBody; - } - catch (_b) { - // The same error is caught below. - // See request.once('error') - return; - } - if (request._isAboutToError) { - return; - } - // Parse body - const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? 
_a : '').toLowerCase(); - const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding); - const { options } = request; - if (isCompressed && !options.decompress) { - response.body = rawBody; - } - else { - try { - response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding); - } - catch (error) { - // Fallback to `utf8` - response.body = rawBody.toString(); - if (is_response_ok_1.isResponseOk(response)) { - request._beforeError(error); - return; - } - } - } - try { - for (const [index, hook] of options.hooks.afterResponse.entries()) { - // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise - // eslint-disable-next-line no-await-in-loop - response = await hook(response, async (updatedOptions) => { - const typedOptions = core_1.default.normalizeArguments(undefined, { - ...updatedOptions, - retry: { - calculateDelay: () => 0 - }, - throwHttpErrors: false, - resolveBodyOnly: false - }, options); - // Remove any further hooks for that request, because we'll call them anyway. - // The loop continues. We don't want duplicates (asPromise recursion). - typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index); - for (const hook of typedOptions.hooks.beforeRetry) { - // eslint-disable-next-line no-await-in-loop - await hook(typedOptions); - } - const promise = asPromise(typedOptions); - onCancel(() => { - promise.catch(() => { }); - promise.cancel(); - }); - return promise; - }); - } - } - catch (error) { - request._beforeError(new types_1.RequestError(error.message, error, request)); - return; - } - if (!is_response_ok_1.isResponseOk(response)) { - request._beforeError(new types_1.HTTPError(response)); - return; - } - globalResponse = response; - resolve(request.options.resolveBodyOnly ? response.body : response); - }); - const onError = (error) => { - if (promise.isCanceled) { - return; - } - const { options } = request; - if (error instanceof types_1.HTTPError && !options.throwHttpErrors) { - const { response } = error; - resolve(request.options.resolveBodyOnly ? response.body : response); - return; - } - reject(error); - }; - request.once('error', onError); - const previousBody = request.options.body; - request.once('retry', (newRetryCount, error) => { - var _a, _b; - if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? 
void 0 : _b.options.body)) { - onError(error); - return; - } - makeRequest(newRetryCount); - }); - proxy_events_1.default(request, emitter, proxiedRequestEvents); - }; - makeRequest(0); - }); - promise.on = (event, fn) => { - emitter.on(event, fn); - return promise; - }; - const shortcut = (responseType) => { - const newPromise = (async () => { - // Wait until downloading has ended - await promise; - const { options } = globalResponse.request; - return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding); - })(); - Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise)); - return newPromise; - }; - promise.json = () => { - const { headers } = globalRequest.options; - if (!globalRequest.writableFinished && headers.accept === undefined) { - headers.accept = 'application/json'; - } - return shortcut('json'); - }; - promise.buffer = () => shortcut('buffer'); - promise.text = () => shortcut('text'); - return promise; -} -exports.default = asPromise; -__exportStar(__nccwpck_require__(4597), exports); - - -/***/ }), - -/***/ 1048: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const is_1 = __nccwpck_require__(7678); -const normalizeArguments = (options, defaults) => { - if (is_1.default.null_(options.encoding)) { - throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead'); - } - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType); - // `options.responseType` - if (options.responseType === undefined) { - options.responseType = 'text'; - } - // `options.retry` - const { retry } = options; - if (defaults) { - options.retry = { ...defaults.retry }; - } - else { - options.retry = { - calculateDelay: retryObject => retryObject.computedValue, - limit: 0, - methods: [], - statusCodes: [], - errorCodes: [], - maxRetryAfter: undefined - }; - } - if (is_1.default.object(retry)) { - options.retry = { - ...options.retry, - ...retry - }; - options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))]; - options.retry.statusCodes = [...new Set(options.retry.statusCodes)]; - options.retry.errorCodes = [...new Set(options.retry.errorCodes)]; - } - else if (is_1.default.number(retry)) { - options.retry.limit = retry; - } - if (is_1.default.undefined(options.retry.maxRetryAfter)) { - options.retry.maxRetryAfter = Math.min( - // TypeScript is not smart enough to handle `.filter(x => is.number(x))`. 
- // eslint-disable-next-line unicorn/no-fn-reference-in-iterator - ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number)); - } - // `options.pagination` - if (is_1.default.object(options.pagination)) { - if (defaults) { - options.pagination = { - ...defaults.pagination, - ...options.pagination - }; - } - const { pagination } = options; - if (!is_1.default.function_(pagination.transform)) { - throw new Error('`options.pagination.transform` must be implemented'); - } - if (!is_1.default.function_(pagination.shouldContinue)) { - throw new Error('`options.pagination.shouldContinue` must be implemented'); - } - if (!is_1.default.function_(pagination.filter)) { - throw new TypeError('`options.pagination.filter` must be implemented'); - } - if (!is_1.default.function_(pagination.paginate)) { - throw new Error('`options.pagination.paginate` must be implemented'); - } - } - // JSON mode - if (options.responseType === 'json' && options.headers.accept === undefined) { - options.headers.accept = 'application/json'; - } - return options; -}; -exports.default = normalizeArguments; - - -/***/ }), - -/***/ 8220: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const types_1 = __nccwpck_require__(4597); -const parseBody = (response, responseType, parseJson, encoding) => { - const { rawBody } = response; - try { - if (responseType === 'text') { - return rawBody.toString(encoding); - } - if (responseType === 'json') { - return rawBody.length === 0 ? '' : parseJson(rawBody.toString()); - } - if (responseType === 'buffer') { - return rawBody; - } - throw new types_1.ParseError({ - message: `Unknown body type '${responseType}'`, - name: 'Error' - }, response); - } - catch (error) { - throw new types_1.ParseError(error, response); - } -}; -exports.default = parseBody; - - -/***/ }), - -/***/ 4597: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CancelError = exports.ParseError = void 0; -const core_1 = __nccwpck_require__(94); -/** -An error to be thrown when server response code is 2xx, and parsing body fails. -Includes a `response` property. -*/ -class ParseError extends core_1.RequestError { - constructor(error, response) { - const { options } = response.request; - super(`${error.message} in "${options.url.toString()}"`, error, response.request); - this.name = 'ParseError'; - } -} -exports.ParseError = ParseError; -/** -An error to be thrown when the request is aborted with `.cancel()`. 
-*/ -class CancelError extends core_1.RequestError { - constructor(request) { - super('Promise was canceled', {}, request); - this.name = 'CancelError'; - } - get isCanceled() { - return true; - } -} -exports.CancelError = CancelError; -__exportStar(__nccwpck_require__(94), exports); - - -/***/ }), - -/***/ 3462: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.retryAfterStatusCodes = void 0; -exports.retryAfterStatusCodes = new Set([413, 429, 503]); -const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => { - if (attemptCount > retryOptions.limit) { - return 0; - } - const hasMethod = retryOptions.methods.includes(error.options.method); - const hasErrorCode = retryOptions.errorCodes.includes(error.code); - const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode); - if (!hasMethod || (!hasErrorCode && !hasStatusCode)) { - return 0; - } - if (error.response) { - if (retryAfter) { - if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) { - return 0; - } - return retryAfter; - } - if (error.response.statusCode === 413) { - return 0; - } - } - const noise = Math.random() * 100; - return ((2 ** (attemptCount - 1)) * 1000) + noise; -}; -exports.default = calculateRetryDelay; - - -/***/ }), - -/***/ 94: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0; -const util_1 = __nccwpck_require__(1669); -const stream_1 = __nccwpck_require__(2413); -const fs_1 = __nccwpck_require__(5747); -const url_1 = __nccwpck_require__(8835); -const http = __nccwpck_require__(8605); -const http_1 = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); -const http_timer_1 = __nccwpck_require__(8097); -const cacheable_lookup_1 = __nccwpck_require__(2286); -const CacheableRequest = __nccwpck_require__(8116); -const decompressResponse = __nccwpck_require__(2391); -// @ts-expect-error Missing types -const http2wrapper = __nccwpck_require__(4645); -const lowercaseKeys = __nccwpck_require__(9662); -const is_1 = __nccwpck_require__(7678); -const get_body_size_1 = __nccwpck_require__(4564); -const is_form_data_1 = __nccwpck_require__(40); -const proxy_events_1 = __nccwpck_require__(3021); -const timed_out_1 = __nccwpck_require__(2454); -const url_to_options_1 = __nccwpck_require__(8026); -const options_to_url_1 = __nccwpck_require__(9219); -const weakable_map_1 = __nccwpck_require__(7288); -const get_buffer_1 = __nccwpck_require__(4500); -const dns_ip_version_1 = __nccwpck_require__(4993); -const is_response_ok_1 = __nccwpck_require__(9298); -const deprecation_warning_1 = __nccwpck_require__(397); -const normalize_arguments_1 = __nccwpck_require__(1048); -const calculate_retry_delay_1 = __nccwpck_require__(3462); -const globalDnsCache = new cacheable_lookup_1.default(); -const kRequest = Symbol('request'); -const kResponse = Symbol('response'); -const kResponseSize = Symbol('responseSize'); -const kDownloadedSize = Symbol('downloadedSize'); -const kBodySize = Symbol('bodySize'); -const kUploadedSize = 
Symbol('uploadedSize'); -const kServerResponsesPiped = Symbol('serverResponsesPiped'); -const kUnproxyEvents = Symbol('unproxyEvents'); -const kIsFromCache = Symbol('isFromCache'); -const kCancelTimeouts = Symbol('cancelTimeouts'); -const kStartedReading = Symbol('startedReading'); -const kStopReading = Symbol('stopReading'); -const kTriggerRead = Symbol('triggerRead'); -const kBody = Symbol('body'); -const kJobs = Symbol('jobs'); -const kOriginalResponse = Symbol('originalResponse'); -const kRetryTimeout = Symbol('retryTimeout'); -exports.kIsNormalizedAlready = Symbol('isNormalizedAlready'); -const supportsBrotli = is_1.default.string(process.versions.brotli); -exports.withoutBody = new Set(['GET', 'HEAD']); -exports.knownHookEvents = [ - 'init', - 'beforeRequest', - 'beforeRedirect', - 'beforeError', - 'beforeRetry', - // Promise-Only - 'afterResponse' -]; -function validateSearchParameters(searchParameters) { - // eslint-disable-next-line guard-for-in - for (const key in searchParameters) { - const value = searchParameters[key]; - if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && !is_1.default.null_(value) && !is_1.default.undefined(value)) { - throw new TypeError(`The \`searchParams\` value '${String(value)}' must be a string, number, boolean or null`); - } - } -} -function isClientRequest(clientRequest) { - return is_1.default.object(clientRequest) && !('statusCode' in clientRequest); -} -const cacheableStore = new weakable_map_1.default(); -const waitForOpenFile = async (file) => new Promise((resolve, reject) => { - const onError = (error) => { - reject(error); - }; - // Node.js 12 has incomplete types - if (!file.pending) { - resolve(); - } - file.once('error', onError); - file.once('ready', () => { - file.off('error', onError); - resolve(); - }); -}); -const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]); -const nonEnumerableProperties = [ - 'context', - 'body', - 'json', - 'form' -]; -exports.setNonEnumerableProperties = (sources, to) => { - // Non enumerable properties shall not be merged - const properties = {}; - for (const source of sources) { - if (!source) { - continue; - } - for (const name of nonEnumerableProperties) { - if (!(name in source)) { - continue; - } - properties[name] = { - writable: true, - configurable: true, - enumerable: false, - // @ts-expect-error TS doesn't see the check above - value: source[name] - }; - } - } - Object.defineProperties(to, properties); -}; -/** -An error to be thrown when a request fails. -Contains a `code` property with error class code, like `ECONNREFUSED`. -*/ -class RequestError extends Error { - constructor(message, error, self) { - var _a; - super(message); - Error.captureStackTrace(this, this.constructor); - this.name = 'RequestError'; - this.code = error.code; - if (self instanceof Request) { - Object.defineProperty(this, 'request', { - enumerable: false, - value: self - }); - Object.defineProperty(this, 'response', { - enumerable: false, - value: self[kResponse] - }); - Object.defineProperty(this, 'options', { - // This fails because of TS 3.7.2 useDefineForClassFields - // Ref: https://github.com/microsoft/TypeScript/issues/34972 - enumerable: false, - value: self.options - }); - } - else { - Object.defineProperty(this, 'options', { - // This fails because of TS 3.7.2 useDefineForClassFields - // Ref: https://github.com/microsoft/TypeScript/issues/34972 - enumerable: false, - value: self - }); - } - this.timings = (_a = this.request) === null || _a === void 0 ? 
void 0 : _a.timings; - // Recover the original stacktrace - if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) { - const indexOfMessage = this.stack.indexOf(this.message) + this.message.length; - const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse(); - const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse(); - // Remove duplicated traces - while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) { - thisStackTrace.shift(); - } - this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`; - } - } -} -exports.RequestError = RequestError; -/** -An error to be thrown when the server redirects you more than ten times. -Includes a `response` property. -*/ -class MaxRedirectsError extends RequestError { - constructor(request) { - super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request); - this.name = 'MaxRedirectsError'; - } -} -exports.MaxRedirectsError = MaxRedirectsError; -/** -An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304. -Includes a `response` property. -*/ -class HTTPError extends RequestError { - constructor(response) { - super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request); - this.name = 'HTTPError'; - } -} -exports.HTTPError = HTTPError; -/** -An error to be thrown when a cache method fails. -For example, if the database goes down or there's a filesystem error. -*/ -class CacheError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'CacheError'; - } -} -exports.CacheError = CacheError; -/** -An error to be thrown when the request body is a stream and an error occurs while reading from that stream. -*/ -class UploadError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'UploadError'; - } -} -exports.UploadError = UploadError; -/** -An error to be thrown when the request is aborted due to a timeout. -Includes an `event` and `timings` property. -*/ -class TimeoutError extends RequestError { - constructor(error, timings, request) { - super(error.message, error, request); - this.name = 'TimeoutError'; - this.event = error.event; - this.timings = timings; - } -} -exports.TimeoutError = TimeoutError; -/** -An error to be thrown when reading from response stream fails. -*/ -class ReadError extends RequestError { - constructor(error, request) { - super(error.message, error, request); - this.name = 'ReadError'; - } -} -exports.ReadError = ReadError; -/** -An error to be thrown when given an unsupported protocol. 
-*/ -class UnsupportedProtocolError extends RequestError { - constructor(options) { - super(`Unsupported protocol "${options.url.protocol}"`, {}, options); - this.name = 'UnsupportedProtocolError'; - } -} -exports.UnsupportedProtocolError = UnsupportedProtocolError; -const proxiedRequestEvents = [ - 'socket', - 'connect', - 'continue', - 'information', - 'upgrade', - 'timeout' -]; -class Request extends stream_1.Duplex { - constructor(url, options = {}, defaults) { - super({ - // This must be false, to enable throwing after destroy - // It is used for retry logic in Promise API - autoDestroy: false, - // It needs to be zero because we're just proxying the data to another stream - highWaterMark: 0 - }); - this[kDownloadedSize] = 0; - this[kUploadedSize] = 0; - this.requestInitialized = false; - this[kServerResponsesPiped] = new Set(); - this.redirects = []; - this[kStopReading] = false; - this[kTriggerRead] = false; - this[kJobs] = []; - this.retryCount = 0; - // TODO: Remove this when targeting Node.js >= 12 - this._progressCallbacks = []; - const unlockWrite = () => this._unlockWrite(); - const lockWrite = () => this._lockWrite(); - this.on('pipe', (source) => { - source.prependListener('data', unlockWrite); - source.on('data', lockWrite); - source.prependListener('end', unlockWrite); - source.on('end', lockWrite); - }); - this.on('unpipe', (source) => { - source.off('data', unlockWrite); - source.off('data', lockWrite); - source.off('end', unlockWrite); - source.off('end', lockWrite); - }); - this.on('pipe', source => { - if (source instanceof http_1.IncomingMessage) { - this.options.headers = { - ...source.headers, - ...this.options.headers - }; - } - }); - const { json, body, form } = options; - if (json || body || form) { - this._lockWrite(); - } - if (exports.kIsNormalizedAlready in options) { - this.options = options; - } - else { - try { - // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible - this.options = this.constructor.normalizeArguments(url, options, defaults); - } - catch (error) { - // TODO: Move this to `_destroy()` - if (is_1.default.nodeStream(options.body)) { - options.body.destroy(); - } - this.destroy(error); - return; - } - } - (async () => { - var _a; - try { - if (this.options.body instanceof fs_1.ReadStream) { - await waitForOpenFile(this.options.body); - } - const { url: normalizedURL } = this.options; - if (!normalizedURL) { - throw new TypeError('Missing `url` property'); - } - this.requestUrl = normalizedURL.toString(); - decodeURI(this.requestUrl); - await this._finalizeBody(); - await this._makeRequest(); - if (this.destroyed) { - (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy(); - return; - } - // Queued writes etc. 
- for (const job of this[kJobs]) { - job(); - } - // Prevent memory leak - this[kJobs].length = 0; - this.requestInitialized = true; - } - catch (error) { - if (error instanceof RequestError) { - this._beforeError(error); - return; - } - // This is a workaround for https://github.com/nodejs/node/issues/33335 - if (!this.destroyed) { - this.destroy(error); - } - } - })(); - } - static normalizeArguments(url, options, defaults) { - var _a, _b, _c, _d, _e; - const rawOptions = options; - if (is_1.default.object(url) && !is_1.default.urlInstance(url)) { - options = { ...defaults, ...url, ...options }; - } - else { - if (url && options && options.url !== undefined) { - throw new TypeError('The `url` option is mutually exclusive with the `input` argument'); - } - options = { ...defaults, ...options }; - if (url !== undefined) { - options.url = url; - } - if (is_1.default.urlInstance(options.url)) { - options.url = new url_1.URL(options.url.toString()); - } - } - // TODO: Deprecate URL options in Got 12. - // Support extend-specific options - if (options.cache === false) { - options.cache = undefined; - } - if (options.dnsCache === false) { - options.dnsCache = undefined; - } - // Nice type assertions - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers); - is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar); - is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams); - is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache); - is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect); - is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress); - is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion); - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https); - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized); - if (options.https) { - is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized); - is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity); - is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority); - is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key); - 
is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate); - is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase); - is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx); - } - is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions); - // `options.method` - if (is_1.default.string(options.method)) { - options.method = options.method.toUpperCase(); - } - else { - options.method = 'GET'; - } - // `options.headers` - if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) { - options.headers = { ...options.headers }; - } - else { - options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? void 0 : defaults.headers), ...options.headers }); - } - // Disallow legacy `url.Url` - if ('slashes' in options) { - throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.'); - } - // `options.auth` - if ('auth' in options) { - throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.'); - } - // `options.searchParams` - if ('searchParams' in options) { - if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) { - let searchParameters; - if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) { - searchParameters = new url_1.URLSearchParams(options.searchParams); - } - else { - validateSearchParameters(options.searchParams); - searchParameters = new url_1.URLSearchParams(); - // eslint-disable-next-line guard-for-in - for (const key in options.searchParams) { - const value = options.searchParams[key]; - if (value === null) { - searchParameters.append(key, ''); - } - else if (value !== undefined) { - searchParameters.append(key, value); - } - } - } - // `normalizeArguments()` is also used to merge options - (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? void 0 : _a.forEach((value, key) => { - // Only use default if one isn't already defined - if (!searchParameters.has(key)) { - searchParameters.append(key, value); - } - }); - options.searchParams = searchParameters; - } - } - // `options.username` & `options.password` - options.username = (_b = options.username) !== null && _b !== void 0 ? _b : ''; - options.password = (_c = options.password) !== null && _c !== void 0 ? _c : ''; - // `options.prefixUrl` & `options.url` - if (is_1.default.undefined(options.prefixUrl)) { - options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? 
_d : ''; - } - else { - options.prefixUrl = options.prefixUrl.toString(); - if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) { - options.prefixUrl += '/'; - } - } - if (is_1.default.string(options.url)) { - if (options.url.startsWith('/')) { - throw new Error('`input` must not start with a slash when using `prefixUrl`'); - } - options.url = options_to_url_1.default(options.prefixUrl + options.url, options); - } - else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) { - options.url = options_to_url_1.default(options.prefixUrl, options); - } - if (options.url) { - if ('port' in options) { - delete options.port; - } - // Make it possible to change `options.prefixUrl` - let { prefixUrl } = options; - Object.defineProperty(options, 'prefixUrl', { - set: (value) => { - const url = options.url; - if (!url.href.startsWith(value)) { - throw new Error(`Cannot change \`prefixUrl\` from ${prefixUrl} to ${value}: ${url.href}`); - } - options.url = new url_1.URL(value + url.href.slice(prefixUrl.length)); - prefixUrl = value; - }, - get: () => prefixUrl - }); - // Support UNIX sockets - let { protocol } = options.url; - if (protocol === 'unix:') { - protocol = 'http:'; - options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`); - } - // Set search params - if (options.searchParams) { - // eslint-disable-next-line @typescript-eslint/no-base-to-string - options.url.search = options.searchParams.toString(); - } - // Protocol check - if (protocol !== 'http:' && protocol !== 'https:') { - throw new UnsupportedProtocolError(options); - } - // Update `username` - if (options.username === '') { - options.username = options.url.username; - } - else { - options.url.username = options.username; - } - // Update `password` - if (options.password === '') { - options.password = options.url.password; - } - else { - options.url.password = options.password; - } - } - // `options.cookieJar` - const { cookieJar } = options; - if (cookieJar) { - let { setCookie, getCookieString } = cookieJar; - is_1.assert.function_(setCookie); - is_1.assert.function_(getCookieString); - /* istanbul ignore next: Horrible `tough-cookie` v3 check */ - if (setCookie.length === 4 && getCookieString.length === 0) { - setCookie = util_1.promisify(setCookie.bind(options.cookieJar)); - getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar)); - options.cookieJar = { - setCookie, - getCookieString: getCookieString - }; - } - } - // `options.cache` - const { cache } = options; - if (cache) { - if (!cacheableStore.has(cache)) { - cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => { - const result = requestOptions[kRequest](requestOptions, handler); - // TODO: remove this when `cacheable-request` supports async request functions. - if (is_1.default.promise(result)) { - // @ts-expect-error - // We only need to implement the error handler in order to support HTTP2 caching. - // The result will be a promise anyway. - result.once = (event, handler) => { - if (event === 'error') { - result.catch(handler); - } - else if (event === 'abort') { - // The empty catch is needed here in case when - // it rejects before it's `await`ed in `_makeRequest`. 
- (async () => { - try { - const request = (await result); - request.once('abort', handler); - } - catch (_a) { } - })(); - } - else { - /* istanbul ignore next: safety check */ - throw new Error(`Unknown HTTP2 promise event: ${event}`); - } - return result; - }; - } - return result; - }), cache)); - } - } - // `options.cacheOptions` - options.cacheOptions = { ...options.cacheOptions }; - // `options.dnsCache` - if (options.dnsCache === true) { - options.dnsCache = globalDnsCache; - } - else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) { - throw new TypeError(`Parameter \`dnsCache\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`); - } - // `options.timeout` - if (is_1.default.number(options.timeout)) { - options.timeout = { request: options.timeout }; - } - else if (defaults && options.timeout !== defaults.timeout) { - options.timeout = { - ...defaults.timeout, - ...options.timeout - }; - } - else { - options.timeout = { ...options.timeout }; - } - // `options.context` - if (!options.context) { - options.context = {}; - } - // `options.hooks` - const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks); - options.hooks = { ...options.hooks }; - for (const event of exports.knownHookEvents) { - if (event in options.hooks) { - if (is_1.default.array(options.hooks[event])) { - // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044 - options.hooks[event] = [...options.hooks[event]]; - } - else { - throw new TypeError(`Parameter \`${event}\` must be an Array, got ${is_1.default(options.hooks[event])}`); - } - } - else { - options.hooks[event] = []; - } - } - if (defaults && !areHooksDefault) { - for (const event of exports.knownHookEvents) { - const defaultHooks = defaults.hooks[event]; - if (defaultHooks.length > 0) { - // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044 - options.hooks[event] = [ - ...defaults.hooks[event], - ...options.hooks[event] - ]; - } - } - } - // DNS options - if ('family' in options) { - deprecation_warning_1.default('"options.family" was never documented, please use "options.dnsLookupIpVersion"'); - } - // HTTPS options - if (defaults === null || defaults === void 0 ? 
void 0 : defaults.https) { - options.https = { ...defaults.https, ...options.https }; - } - if ('rejectUnauthorized' in options) { - deprecation_warning_1.default('"options.rejectUnauthorized" is now deprecated, please use "options.https.rejectUnauthorized"'); - } - if ('checkServerIdentity' in options) { - deprecation_warning_1.default('"options.checkServerIdentity" was never documented, please use "options.https.checkServerIdentity"'); - } - if ('ca' in options) { - deprecation_warning_1.default('"options.ca" was never documented, please use "options.https.certificateAuthority"'); - } - if ('key' in options) { - deprecation_warning_1.default('"options.key" was never documented, please use "options.https.key"'); - } - if ('cert' in options) { - deprecation_warning_1.default('"options.cert" was never documented, please use "options.https.certificate"'); - } - if ('passphrase' in options) { - deprecation_warning_1.default('"options.passphrase" was never documented, please use "options.https.passphrase"'); - } - if ('pfx' in options) { - deprecation_warning_1.default('"options.pfx" was never documented, please use "options.https.pfx"'); - } - // Other options - if ('followRedirects' in options) { - throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.'); - } - if (options.agent) { - for (const key in options.agent) { - if (key !== 'http' && key !== 'https' && key !== 'http2') { - throw new TypeError(`Expected the \`options.agent\` properties to be \`http\`, \`https\` or \`http2\`, got \`${key}\``); - } - } - } - options.maxRedirects = (_e = options.maxRedirects) !== null && _e !== void 0 ? _e : 0; - // Set non-enumerable properties - exports.setNonEnumerableProperties([defaults, rawOptions], options); - return normalize_arguments_1.default(options, defaults); - } - _lockWrite() { - const onLockedWrite = () => { - throw new TypeError('The payload has been already provided'); - }; - this.write = onLockedWrite; - this.end = onLockedWrite; - } - _unlockWrite() { - this.write = super.write; - this.end = super.end; - } - async _finalizeBody() { - const { options } = this; - const { headers } = options; - const isForm = !is_1.default.undefined(options.form); - const isJSON = !is_1.default.undefined(options.json); - const isBody = !is_1.default.undefined(options.body); - const hasPayload = isForm || isJSON || isBody; - const cannotHaveBody = exports.withoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody); - this._cannotHaveBody = cannotHaveBody; - if (hasPayload) { - if (cannotHaveBody) { - throw new TypeError(`The \`${options.method}\` method cannot be used with a body`); - } - if ([isBody, isForm, isJSON].filter(isTrue => isTrue).length > 1) { - throw new TypeError('The `body`, `json` and `form` options are mutually exclusive'); - } - if (isBody && - !(options.body instanceof stream_1.Readable) && - !is_1.default.string(options.body) && - !is_1.default.buffer(options.body) && - !is_form_data_1.default(options.body)) { - throw new TypeError('The `body` option must be a stream.Readable, string or Buffer'); - } - if (isForm && !is_1.default.object(options.form)) { - throw new TypeError('The `form` option must be an Object'); - } - { - // Serialize body - const noContentType = !is_1.default.string(headers['content-type']); - if (isBody) { - // Special case for https://github.com/form-data/form-data - if (is_form_data_1.default(options.body) && noContentType) { - headers['content-type'] = `multipart/form-data; 
boundary=${options.body.getBoundary()}`; - } - this[kBody] = options.body; - } - else if (isForm) { - if (noContentType) { - headers['content-type'] = 'application/x-www-form-urlencoded'; - } - this[kBody] = (new url_1.URLSearchParams(options.form)).toString(); - } - else { - if (noContentType) { - headers['content-type'] = 'application/json'; - } - this[kBody] = options.stringifyJson(options.json); - } - const uploadBodySize = await get_body_size_1.default(this[kBody], options.headers); - // See https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD send a Content-Length in a request message when - // no Transfer-Encoding is sent and the request method defines a meaning - // for an enclosed payload body. For example, a Content-Length header - // field is normally sent in a POST request even when the value is 0 - // (indicating an empty payload body). A user agent SHOULD NOT send a - // Content-Length header field when the request message does not contain - // a payload body and the method semantics do not anticipate such a - // body. - if (is_1.default.undefined(headers['content-length']) && is_1.default.undefined(headers['transfer-encoding'])) { - if (!cannotHaveBody && !is_1.default.undefined(uploadBodySize)) { - headers['content-length'] = String(uploadBodySize); - } - } - } - } - else if (cannotHaveBody) { - this._lockWrite(); - } - else { - this._unlockWrite(); - } - this[kBodySize] = Number(headers['content-length']) || undefined; - } - async _onResponseBase(response) { - const { options } = this; - const { url } = options; - this[kOriginalResponse] = response; - if (options.decompress) { - response = decompressResponse(response); - } - const statusCode = response.statusCode; - const typedResponse = response; - typedResponse.statusMessage = typedResponse.statusMessage ? typedResponse.statusMessage : http.STATUS_CODES[statusCode]; - typedResponse.url = options.url.toString(); - typedResponse.requestUrl = this.requestUrl; - typedResponse.redirectUrls = this.redirects; - typedResponse.request = this; - typedResponse.isFromCache = response.fromCache || false; - typedResponse.ip = this.ip; - typedResponse.retryCount = this.retryCount; - this[kIsFromCache] = typedResponse.isFromCache; - this[kResponseSize] = Number(response.headers['content-length']) || undefined; - this[kResponse] = response; - response.once('end', () => { - this[kResponseSize] = this[kDownloadedSize]; - this.emit('downloadProgress', this.downloadProgress); - }); - response.once('error', (error) => { - // Force clean-up, because some packages don't do this. - // TODO: Fix decompress-response - response.destroy(); - this._beforeError(new ReadError(error, this)); - }); - response.once('aborted', () => { - this._beforeError(new ReadError({ - name: 'Error', - message: 'The server aborted pending request', - code: 'ECONNRESET' - }, this)); - }); - this.emit('downloadProgress', this.downloadProgress); - const rawCookies = response.headers['set-cookie']; - if (is_1.default.object(options.cookieJar) && rawCookies) { - let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString())); - if (options.ignoreInvalidCookies) { - promises = promises.map(async (p) => p.catch(() => { })); - } - try { - await Promise.all(promises); - } - catch (error) { - this._beforeError(error); - return; - } - } - if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) { - // We're being redirected, we don't care about the response. 
- // It'd be best to abort the request, but we can't because - // we would have to sacrifice the TCP connection. We don't want that. - response.resume(); - if (this[kRequest]) { - this[kCancelTimeouts](); - // eslint-disable-next-line @typescript-eslint/no-dynamic-delete - delete this[kRequest]; - this[kUnproxyEvents](); - } - const shouldBeGet = statusCode === 303 && options.method !== 'GET' && options.method !== 'HEAD'; - if (shouldBeGet || !options.methodRewriting) { - // Server responded with "see other", indicating that the resource exists at another location, - // and the client should request it from that location via GET or HEAD. - options.method = 'GET'; - if ('body' in options) { - delete options.body; - } - if ('json' in options) { - delete options.json; - } - if ('form' in options) { - delete options.form; - } - this[kBody] = undefined; - delete options.headers['content-length']; - } - if (this.redirects.length >= options.maxRedirects) { - this._beforeError(new MaxRedirectsError(this)); - return; - } - try { - // Do not remove. See https://github.com/sindresorhus/got/pull/214 - const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString(); - // Handles invalid URLs. See https://github.com/sindresorhus/got/issues/604 - const redirectUrl = new url_1.URL(redirectBuffer, url); - const redirectString = redirectUrl.toString(); - decodeURI(redirectString); - // Redirecting to a different site, clear sensitive data. - if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) { - if ('host' in options.headers) { - delete options.headers.host; - } - if ('cookie' in options.headers) { - delete options.headers.cookie; - } - if ('authorization' in options.headers) { - delete options.headers.authorization; - } - if (options.username || options.password) { - options.username = ''; - options.password = ''; - } - } - else { - redirectUrl.username = options.username; - redirectUrl.password = options.password; - } - this.redirects.push(redirectString); - options.url = redirectUrl; - for (const hook of options.hooks.beforeRedirect) { - // eslint-disable-next-line no-await-in-loop - await hook(options, typedResponse); - } - this.emit('redirect', typedResponse, options); - await this._makeRequest(); - } - catch (error) { - this._beforeError(error); - return; - } - return; - } - if (options.isStream && options.throwHttpErrors && !is_response_ok_1.isResponseOk(typedResponse)) { - this._beforeError(new HTTPError(typedResponse)); - return; - } - response.on('readable', () => { - if (this[kTriggerRead]) { - this._read(); - } - }); - this.on('resume', () => { - response.resume(); - }); - this.on('pause', () => { - response.pause(); - }); - response.once('end', () => { - this.push(null); - }); - this.emit('response', response); - for (const destination of this[kServerResponsesPiped]) { - if (destination.headersSent) { - continue; - } - // eslint-disable-next-line guard-for-in - for (const key in response.headers) { - const isAllowed = options.decompress ? 
key !== 'content-encoding' : true; - const value = response.headers[key]; - if (isAllowed) { - destination.setHeader(key, value); - } - } - destination.statusCode = statusCode; - } - } - async _onResponse(response) { - try { - await this._onResponseBase(response); - } - catch (error) { - /* istanbul ignore next: better safe than sorry */ - this._beforeError(error); - } - } - _onRequest(request) { - const { options } = this; - const { timeout, url } = options; - http_timer_1.default(request); - this[kCancelTimeouts] = timed_out_1.default(request, timeout, url); - const responseEventName = options.cache ? 'cacheableResponse' : 'response'; - request.once(responseEventName, (response) => { - void this._onResponse(response); - }); - request.once('error', (error) => { - var _a; - // Force clean-up, because some packages (e.g. nock) don't do this. - request.destroy(); - // Node.js <= 12.18.2 mistakenly emits the response `end` first. - (_a = request.res) === null || _a === void 0 ? void 0 : _a.removeAllListeners('end'); - error = error instanceof timed_out_1.TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this); - this._beforeError(error); - }); - this[kUnproxyEvents] = proxy_events_1.default(request, this, proxiedRequestEvents); - this[kRequest] = request; - this.emit('uploadProgress', this.uploadProgress); - // Send body - const body = this[kBody]; - const currentRequest = this.redirects.length === 0 ? this : request; - if (is_1.default.nodeStream(body)) { - body.pipe(currentRequest); - body.once('error', (error) => { - this._beforeError(new UploadError(error, this)); - }); - } - else { - this._unlockWrite(); - if (!is_1.default.undefined(body)) { - this._writeRequest(body, undefined, () => { }); - currentRequest.end(); - this._lockWrite(); - } - else if (this._cannotHaveBody || this._noPipe) { - currentRequest.end(); - this._lockWrite(); - } - } - this.emit('request', request); - } - async _createCacheableRequest(url, options) { - return new Promise((resolve, reject) => { - // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed - Object.assign(options, url_to_options_1.default(url)); - // `http-cache-semantics` checks this - // TODO: Fix this ignore. - // @ts-expect-error - delete options.url; - let request; - // This is ugly - const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => { - // TODO: Fix `cacheable-response` - response._readableState.autoDestroy = false; - if (request) { - (await request).emit('cacheableResponse', response); - } - resolve(response); - }); - // Restore options - options.url = url; - cacheRequest.once('error', reject); - cacheRequest.once('request', async (requestOrPromise) => { - request = requestOrPromise; - resolve(request); - }); - }); - } - async _makeRequest() { - var _a, _b, _c, _d, _e; - const { options } = this; - const { headers } = options; - for (const key in headers) { - if (is_1.default.undefined(headers[key])) { - // eslint-disable-next-line @typescript-eslint/no-dynamic-delete - delete headers[key]; - } - else if (is_1.default.null_(headers[key])) { - throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`); - } - } - if (options.decompress && is_1.default.undefined(headers['accept-encoding'])) { - headers['accept-encoding'] = supportsBrotli ? 
'gzip, deflate, br' : 'gzip, deflate'; - // Set cookies - if (options.cookieJar) { - const cookieString = await options.cookieJar.getCookieString(options.url.toString()); - if (is_1.default.nonEmptyString(cookieString)) { - options.headers.cookie = cookieString; - } - } - for (const hook of options.hooks.beforeRequest) { - // eslint-disable-next-line no-await-in-loop - const result = await hook(options); - if (!is_1.default.undefined(result)) { - // @ts-expect-error Skip the type mismatch to support abstract responses - options.request = () => result; - break; - } - } - if (options.body && this[kBody] !== options.body) { - this[kBody] = options.body; - } - const { agent, request, timeout, url } = options; - if (options.dnsCache && !('lookup' in options)) { - options.lookup = options.dnsCache.lookup; - } - // UNIX sockets - if (url.hostname === 'unix') { - const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`); - if (matches === null || matches === void 0 ? void 0 : matches.groups) { - const { socketPath, path } = matches.groups; - Object.assign(options, { - socketPath, - path, - host: '' - }); - } - } - const isHttps = url.protocol === 'https:'; - // Fallback function - let fallbackFn; - if (options.http2) { - fallbackFn = http2wrapper.auto; - } - else { - fallbackFn = isHttps ? https.request : http.request; - } - const realFn = (_a = options.request) !== null && _a !== void 0 ? _a : fallbackFn; - // Cache support - const fn = options.cache ? this._createCacheableRequest : realFn; - // Pass an agent directly when HTTP2 is disabled - if (agent && !options.http2) { - options.agent = agent[isHttps ? 'https' : 'http']; - } - // Prepare plain HTTP request options - options[kRequest] = realFn; - delete options.request; - // TODO: Fix this ignore. - // @ts-expect-error - delete options.timeout; - const requestOptions = options; - requestOptions.shared = (_b = options.cacheOptions) === null || _b === void 0 ? void 0 : _b.shared; - requestOptions.cacheHeuristic = (_c = options.cacheOptions) === null || _c === void 0 ? void 0 : _c.cacheHeuristic; - requestOptions.immutableMinTimeToLive = (_d = options.cacheOptions) === null || _d === void 0 ? void 0 : _d.immutableMinTimeToLive; - requestOptions.ignoreCargoCult = (_e = options.cacheOptions) === null || _e === void 0 ?
void 0 : _e.ignoreCargoCult; - // If `dnsLookupIpVersion` is not present do not override `family` - if (options.dnsLookupIpVersion !== undefined) { - try { - requestOptions.family = dns_ip_version_1.dnsLookupIpVersionToFamily(options.dnsLookupIpVersion); - } - catch (_f) { - throw new Error('Invalid `dnsLookupIpVersion` option value'); - } - } - // HTTPS options remapping - if (options.https) { - if ('rejectUnauthorized' in options.https) { - requestOptions.rejectUnauthorized = options.https.rejectUnauthorized; - } - if (options.https.checkServerIdentity) { - requestOptions.checkServerIdentity = options.https.checkServerIdentity; - } - if (options.https.certificateAuthority) { - requestOptions.ca = options.https.certificateAuthority; - } - if (options.https.certificate) { - requestOptions.cert = options.https.certificate; - } - if (options.https.key) { - requestOptions.key = options.https.key; - } - if (options.https.passphrase) { - requestOptions.passphrase = options.https.passphrase; - } - if (options.https.pfx) { - requestOptions.pfx = options.https.pfx; - } - } - try { - let requestOrResponse = await fn(url, requestOptions); - if (is_1.default.undefined(requestOrResponse)) { - requestOrResponse = fallbackFn(url, requestOptions); - } - // Restore options - options.request = request; - options.timeout = timeout; - options.agent = agent; - // HTTPS options restore - if (options.https) { - if ('rejectUnauthorized' in options.https) { - delete requestOptions.rejectUnauthorized; - } - if (options.https.checkServerIdentity) { - // @ts-expect-error - This one will be removed when we remove the alias. - delete requestOptions.checkServerIdentity; - } - if (options.https.certificateAuthority) { - delete requestOptions.ca; - } - if (options.https.certificate) { - delete requestOptions.cert; - } - if (options.https.key) { - delete requestOptions.key; - } - if (options.https.passphrase) { - delete requestOptions.passphrase; - } - if (options.https.pfx) { - delete requestOptions.pfx; - } - } - if (isClientRequest(requestOrResponse)) { - this._onRequest(requestOrResponse); - // Emit the response after the stream has been ended - } - else if (this.writable) { - this.once('finish', () => { - void this._onResponse(requestOrResponse); - }); - this._unlockWrite(); - this.end(); - this._lockWrite(); - } - else { - void this._onResponse(requestOrResponse); - } - } - catch (error) { - if (error instanceof CacheableRequest.CacheError) { - throw new CacheError(error, this); - } - throw new RequestError(error.message, error, this); - } - } - async _error(error) { - try { - for (const hook of this.options.hooks.beforeError) { - // eslint-disable-next-line no-await-in-loop - error = await hook(error); - } - } - catch (error_) { - error = new RequestError(error_.message, error_, this); - } - this.destroy(error); - } - _beforeError(error) { - if (this[kStopReading]) { - return; - } - const { options } = this; - const retryCount = this.retryCount + 1; - this[kStopReading] = true; - if (!(error instanceof RequestError)) { - error = new RequestError(error.message, error, this); - } - const typedError = error; - const { response } = typedError; - void (async () => { - if (response && !response.body) { - response.setEncoding(this._readableState.encoding); - try { - response.rawBody = await get_buffer_1.default(response); - response.body = response.rawBody.toString(); - } - catch (_a) { } - } - if (this.listenerCount('retry') !== 0) { - let backoff; - try { - let retryAfter; - if (response && 'retry-after' in 
response.headers) { - retryAfter = Number(response.headers['retry-after']); - if (Number.isNaN(retryAfter)) { - retryAfter = Date.parse(response.headers['retry-after']) - Date.now(); - if (retryAfter <= 0) { - retryAfter = 1; - } - } - else { - retryAfter *= 1000; - } - } - backoff = await options.retry.calculateDelay({ - attemptCount: retryCount, - retryOptions: options.retry, - error: typedError, - retryAfter, - computedValue: calculate_retry_delay_1.default({ - attemptCount: retryCount, - retryOptions: options.retry, - error: typedError, - retryAfter, - computedValue: 0 - }) - }); - } - catch (error_) { - void this._error(new RequestError(error_.message, error_, this)); - return; - } - if (backoff) { - const retry = async () => { - try { - for (const hook of this.options.hooks.beforeRetry) { - // eslint-disable-next-line no-await-in-loop - await hook(this.options, typedError, retryCount); - } - } - catch (error_) { - void this._error(new RequestError(error_.message, error, this)); - return; - } - // Something forced us to abort the retry - if (this.destroyed) { - return; - } - this.destroy(); - this.emit('retry', retryCount, error); - }; - this[kRetryTimeout] = setTimeout(retry, backoff); - return; - } - } - void this._error(typedError); - })(); - } - _read() { - this[kTriggerRead] = true; - const response = this[kResponse]; - if (response && !this[kStopReading]) { - // We cannot put this in the `if` above - // because `.read()` also triggers the `end` event - if (response.readableLength) { - this[kTriggerRead] = false; - } - let data; - while ((data = response.read()) !== null) { - this[kDownloadedSize] += data.length; - this[kStartedReading] = true; - const progress = this.downloadProgress; - if (progress.percent < 1) { - this.emit('downloadProgress', progress); - } - this.push(data); - } - } - } - // Node.js 12 has incorrect types, so the encoding must be a string - _write(chunk, encoding, callback) { - const write = () => { - this._writeRequest(chunk, encoding, callback); - }; - if (this.requestInitialized) { - write(); - } - else { - this[kJobs].push(write); - } - } - _writeRequest(chunk, encoding, callback) { - if (this[kRequest].destroyed) { - // Probably the `ClientRequest` instance will throw - return; - } - this._progressCallbacks.push(() => { - this[kUploadedSize] += Buffer.byteLength(chunk, encoding); - const progress = this.uploadProgress; - if (progress.percent < 1) { - this.emit('uploadProgress', progress); - } - }); - // TODO: What happens if it's from cache? Then this[kRequest] won't be defined. - this[kRequest].write(chunk, encoding, (error) => { - if (!error && this._progressCallbacks.length > 0) { - this._progressCallbacks.shift()(); - } - callback(error); - }); - } - _final(callback) { - const endRequest = () => { - // FIX: Node.js 10 calls the write callback AFTER the end callback! - while (this._progressCallbacks.length !== 0) { - this._progressCallbacks.shift()(); - } - // We need to check if `this[kRequest]` is present, - // because it isn't when we use cache. 
- if (!(kRequest in this)) { - callback(); - return; - } - if (this[kRequest].destroyed) { - callback(); - return; - } - this[kRequest].end((error) => { - if (!error) { - this[kBodySize] = this[kUploadedSize]; - this.emit('uploadProgress', this.uploadProgress); - this[kRequest].emit('upload-complete'); - } - callback(error); - }); - }; - if (this.requestInitialized) { - endRequest(); - } - else { - this[kJobs].push(endRequest); - } - } - _destroy(error, callback) { - var _a; - this[kStopReading] = true; - // Prevent further retries - clearTimeout(this[kRetryTimeout]); - if (kRequest in this) { - this[kCancelTimeouts](); - // TODO: Remove the next `if` when these get fixed: - // - https://github.com/nodejs/node/issues/32851 - if (!((_a = this[kResponse]) === null || _a === void 0 ? void 0 : _a.complete)) { - this[kRequest].destroy(); - } - } - if (error !== null && !is_1.default.undefined(error) && !(error instanceof RequestError)) { - error = new RequestError(error.message, error, this); - } - callback(error); - } - get _isAboutToError() { - return this[kStopReading]; - } - /** - The remote IP address. - */ - get ip() { - var _a; - return (_a = this.socket) === null || _a === void 0 ? void 0 : _a.remoteAddress; - } - /** - Indicates whether the request has been aborted or not. - */ - get aborted() { - var _a, _b, _c; - return ((_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroyed) !== null && _b !== void 0 ? _b : this.destroyed) && !((_c = this[kOriginalResponse]) === null || _c === void 0 ? void 0 : _c.complete); - } - get socket() { - var _a, _b; - return (_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.socket) !== null && _b !== void 0 ? _b : undefined; - } - /** - Progress event for downloading (receiving a response). - */ - get downloadProgress() { - let percent; - if (this[kResponseSize]) { - percent = this[kDownloadedSize] / this[kResponseSize]; - } - else if (this[kResponseSize] === this[kDownloadedSize]) { - percent = 1; - } - else { - percent = 0; - } - return { - percent, - transferred: this[kDownloadedSize], - total: this[kResponseSize] - }; - } - /** - Progress event for uploading (sending a request). - */ - get uploadProgress() { - let percent; - if (this[kBodySize]) { - percent = this[kUploadedSize] / this[kBodySize]; - } - else if (this[kBodySize] === this[kUploadedSize]) { - percent = 1; - } - else { - percent = 0; - } - return { - percent, - transferred: this[kUploadedSize], - total: this[kBodySize] - }; - } - /** - The object contains the following properties: - - - `start` - Time when the request started. - - `socket` - Time when a socket was assigned to the request. - - `lookup` - Time when the DNS lookup finished. - - `connect` - Time when the socket successfully connected. - - `secureConnect` - Time when the socket securely connected. - - `upload` - Time when the request finished uploading. - - `response` - Time when the request fired `response` event. - - `end` - Time when the response fired `end` event. - - `error` - Time when the request fired `error` event. - - `abort` - Time when the request fired `abort` event. 
- - `phases` - - `wait` - `timings.socket - timings.start` - - `dns` - `timings.lookup - timings.socket` - - `tcp` - `timings.connect - timings.lookup` - - `tls` - `timings.secureConnect - timings.connect` - - `request` - `timings.upload - (timings.secureConnect || timings.connect)` - - `firstByte` - `timings.response - timings.upload` - - `download` - `timings.end - timings.response` - - `total` - `(timings.end || timings.error || timings.abort) - timings.start` - - If something has not been measured yet, it will be `undefined`. - - __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch. - */ - get timings() { - var _a; - return (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.timings; - } - /** - Whether the response was retrieved from the cache. - */ - get isFromCache() { - return this[kIsFromCache]; - } - pipe(destination, options) { - if (this[kStartedReading]) { - throw new Error('Failed to pipe. The response has been emitted already.'); - } - if (destination instanceof http_1.ServerResponse) { - this[kServerResponsesPiped].add(destination); - } - return super.pipe(destination, options); - } - unpipe(destination) { - if (destination instanceof http_1.ServerResponse) { - this[kServerResponsesPiped].delete(destination); - } - super.unpipe(destination); - return this; - } -} -exports.default = Request; - - -/***/ }), - -/***/ 4993: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.dnsLookupIpVersionToFamily = exports.isDnsLookupIpVersion = void 0; -const conversionTable = { - auto: 0, - ipv4: 4, - ipv6: 6 -}; -exports.isDnsLookupIpVersion = (value) => { - return value in conversionTable; -}; -exports.dnsLookupIpVersionToFamily = (dnsLookupIpVersion) => { - if (exports.isDnsLookupIpVersion(dnsLookupIpVersion)) { - return conversionTable[dnsLookupIpVersion]; - } - throw new Error('Invalid DNS lookup IP version'); -}; - - -/***/ }), - -/***/ 4564: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs_1 = __nccwpck_require__(5747); -const util_1 = __nccwpck_require__(1669); -const is_1 = __nccwpck_require__(7678); -const is_form_data_1 = __nccwpck_require__(40); -const statAsync = util_1.promisify(fs_1.stat); -exports.default = async (body, headers) => { - if (headers && 'content-length' in headers) { - return Number(headers['content-length']); - } - if (!body) { - return 0; - } - if (is_1.default.string(body)) { - return Buffer.byteLength(body); - } - if (is_1.default.buffer(body)) { - return body.length; - } - if (is_form_data_1.default(body)) { - return util_1.promisify(body.getLength.bind(body))(); - } - if (body instanceof fs_1.ReadStream) { - const { size } = await statAsync(body.path); - if (size === 0) { - return undefined; - } - return size; - } - return undefined; -}; - - -/***/ }), - -/***/ 4500: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -// TODO: Update https://github.com/sindresorhus/get-stream -const getBuffer = async (stream) => { - const chunks = []; - let length = 0; - for await (const chunk of stream) { - chunks.push(chunk); - length += Buffer.byteLength(chunk); - } - if (Buffer.isBuffer(chunks[0])) { - return Buffer.concat(chunks, length); - } - return Buffer.from(chunks.join('')); -}; -exports.default = getBuffer; - - -/***/ }), - 
-/***/ 40: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const is_1 = __nccwpck_require__(7678); -exports.default = (body) => is_1.default.nodeStream(body) && is_1.default.function_(body.getBoundary); - - -/***/ }), - -/***/ 9298: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isResponseOk = void 0; -exports.isResponseOk = (response) => { - const { statusCode } = response; - const limitStatusCode = response.request.options.followRedirect ? 299 : 399; - return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304; -}; - - -/***/ }), - -/***/ 9219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -/* istanbul ignore file: deprecated */ -const url_1 = __nccwpck_require__(8835); -const keys = [ - 'protocol', - 'host', - 'hostname', - 'port', - 'pathname', - 'search' -]; -exports.default = (origin, options) => { - var _a, _b; - if (options.path) { - if (options.pathname) { - throw new TypeError('Parameters `path` and `pathname` are mutually exclusive.'); - } - if (options.search) { - throw new TypeError('Parameters `path` and `search` are mutually exclusive.'); - } - if (options.searchParams) { - throw new TypeError('Parameters `path` and `searchParams` are mutually exclusive.'); - } - } - if (options.search && options.searchParams) { - throw new TypeError('Parameters `search` and `searchParams` are mutually exclusive.'); - } - if (!origin) { - if (!options.protocol) { - throw new TypeError('No URL protocol specified'); - } - origin = `${options.protocol}//${(_b = (_a = options.hostname) !== null && _a !== void 0 ? _a : options.host) !== null && _b !== void 0 ? 
_b : ''}`; - } - const url = new url_1.URL(origin); - if (options.path) { - const searchIndex = options.path.indexOf('?'); - if (searchIndex === -1) { - options.pathname = options.path; - } - else { - options.pathname = options.path.slice(0, searchIndex); - options.search = options.path.slice(searchIndex + 1); - } - delete options.path; - } - for (const key of keys) { - if (options[key]) { - url[key] = options[key].toString(); - } - } - return url; -}; - - -/***/ }), - -/***/ 3021: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -function default_1(from, to, events) { - const fns = {}; - for (const event of events) { - fns[event] = (...args) => { - to.emit(event, ...args); - }; - from.on(event, fns[event]); - } - return () => { - for (const event of events) { - from.off(event, fns[event]); - } - }; -} -exports.default = default_1; - - -/***/ }), - -/***/ 2454: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TimeoutError = void 0; -const net = __nccwpck_require__(1631); -const unhandle_1 = __nccwpck_require__(1593); -const reentry = Symbol('reentry'); -const noop = () => { }; -class TimeoutError extends Error { - constructor(threshold, event) { - super(`Timeout awaiting '${event}' for ${threshold}ms`); - this.event = event; - this.name = 'TimeoutError'; - this.code = 'ETIMEDOUT'; - } -} -exports.TimeoutError = TimeoutError; -exports.default = (request, delays, options) => { - if (reentry in request) { - return noop; - } - request[reentry] = true; - const cancelers = []; - const { once, unhandleAll } = unhandle_1.default(); - const addTimeout = (delay, callback, event) => { - var _a; - const timeout = setTimeout(callback, delay, delay, event); - (_a = timeout.unref) === null || _a === void 0 ? void 0 : _a.call(timeout); - const cancel = () => { - clearTimeout(timeout); - }; - cancelers.push(cancel); - return cancel; - }; - const { host, hostname } = options; - const timeoutHandler = (delay, event) => { - request.destroy(new TimeoutError(delay, event)); - }; - const cancelTimeouts = () => { - for (const cancel of cancelers) { - cancel(); - } - unhandleAll(); - }; - request.once('error', error => { - cancelTimeouts(); - // Save original behavior - /* istanbul ignore next */ - if (request.listenerCount('error') === 0) { - throw error; - } - }); - request.once('close', cancelTimeouts); - once(request, 'response', (response) => { - once(response, 'end', cancelTimeouts); - }); - if (typeof delays.request !== 'undefined') { - addTimeout(delays.request, timeoutHandler, 'request'); - } - if (typeof delays.socket !== 'undefined') { - const socketTimeoutHandler = () => { - timeoutHandler(delays.socket, 'socket'); - }; - request.setTimeout(delays.socket, socketTimeoutHandler); - // `request.setTimeout(0)` causes a memory leak. - // We can just remove the listener and forget about the timer - it's unreffed. - // See https://github.com/sindresorhus/got/issues/690 - cancelers.push(() => { - request.removeListener('timeout', socketTimeoutHandler); - }); - } - once(request, 'socket', (socket) => { - var _a; - const { socketPath } = request; - /* istanbul ignore next: hard to test */ - if (socket.connecting) { - const hasPath = Boolean(socketPath !== null && socketPath !== void 0 ? socketPath : net.isIP((_a = hostname !== null && hostname !== void 0 ? hostname : host) !== null && _a !== void 0 ? 
_a : '') !== 0); - if (typeof delays.lookup !== 'undefined' && !hasPath && typeof socket.address().address === 'undefined') { - const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup'); - once(socket, 'lookup', cancelTimeout); - } - if (typeof delays.connect !== 'undefined') { - const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect'); - if (hasPath) { - once(socket, 'connect', timeConnect()); - } - else { - once(socket, 'lookup', (error) => { - if (error === null) { - once(socket, 'connect', timeConnect()); - } - }); - } - } - if (typeof delays.secureConnect !== 'undefined' && options.protocol === 'https:') { - once(socket, 'connect', () => { - const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect'); - once(socket, 'secureConnect', cancelTimeout); - }); - } - } - if (typeof delays.send !== 'undefined') { - const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send'); - /* istanbul ignore next: hard to test */ - if (socket.connecting) { - once(socket, 'connect', () => { - once(request, 'upload-complete', timeRequest()); - }); - } - else { - once(request, 'upload-complete', timeRequest()); - } - } - }); - if (typeof delays.response !== 'undefined') { - once(request, 'upload-complete', () => { - const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response'); - once(request, 'response', cancelTimeout); - }); - } - return cancelTimeouts; -}; - - -/***/ }), - -/***/ 1593: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -// When attaching listeners, it's very easy to forget about them. -// Especially if you do error handling and set timeouts. -// So instead of checking if it's proper to throw an error on every timeout ever, -// use this simple tool which will remove all listeners you have attached. -exports.default = () => { - const handlers = []; - return { - once(origin, event, fn) { - origin.once(event, fn); - handlers.push({ origin, event, fn }); - }, - unhandleAll() { - for (const handler of handlers) { - const { origin, event, fn } = handler; - origin.removeListener(event, fn); - } - handlers.length = 0; - } - }; -}; - - -/***/ }), - -/***/ 8026: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const is_1 = __nccwpck_require__(7678); -exports.default = (url) => { - // Cast to URL - url = url; - const options = { - protocol: url.protocol, - hostname: is_1.default.string(url.hostname) && url.hostname.startsWith('[') ? 
url.hostname.slice(1, -1) : url.hostname, - host: url.host, - hash: url.hash, - search: url.search, - pathname: url.pathname, - href: url.href, - path: `${url.pathname || ''}${url.search || ''}` - }; - if (is_1.default.string(url.port) && url.port.length > 0) { - options.port = Number(url.port); - } - if (url.username || url.password) { - options.auth = `${url.username || ''}:${url.password || ''}`; - } - return options; -}; - - -/***/ }), - -/***/ 7288: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -class WeakableMap { - constructor() { - this.weakMap = new WeakMap(); - this.map = new Map(); - } - set(key, value) { - if (typeof key === 'object') { - this.weakMap.set(key, value); - } - else { - this.map.set(key, value); - } - } - get(key) { - if (typeof key === 'object') { - return this.weakMap.get(key); - } - return this.map.get(key); - } - has(key) { - if (typeof key === 'object') { - return this.weakMap.has(key); - } - return this.map.has(key); - } -} -exports.default = WeakableMap; - - -/***/ }), - -/***/ 4337: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.defaultHandler = void 0; -const is_1 = __nccwpck_require__(7678); -const as_promise_1 = __nccwpck_require__(6056); -const create_rejection_1 = __nccwpck_require__(6457); -const core_1 = __nccwpck_require__(94); -const deep_freeze_1 = __nccwpck_require__(285); -const errors = { - RequestError: as_promise_1.RequestError, - CacheError: as_promise_1.CacheError, - ReadError: as_promise_1.ReadError, - HTTPError: as_promise_1.HTTPError, - MaxRedirectsError: as_promise_1.MaxRedirectsError, - TimeoutError: as_promise_1.TimeoutError, - ParseError: as_promise_1.ParseError, - CancelError: as_promise_1.CancelError, - UnsupportedProtocolError: as_promise_1.UnsupportedProtocolError, - UploadError: as_promise_1.UploadError -}; -// The `delay` package weighs 10KB (!) -const delay = async (ms) => new Promise(resolve => { - setTimeout(resolve, ms); -}); -const { normalizeArguments } = core_1.default; -const mergeOptions = (...sources) => { - let mergedOptions; - for (const source of sources) { - mergedOptions = normalizeArguments(undefined, source, mergedOptions); - } - return mergedOptions; -}; -const getPromiseOrStream = (options) => options.isStream ? 
new core_1.default(undefined, options) : as_promise_1.default(options); -const isGotInstance = (value) => ('defaults' in value && 'options' in value.defaults); -const aliases = [ - 'get', - 'post', - 'put', - 'patch', - 'head', - 'delete' -]; -exports.defaultHandler = (options, next) => next(options); -const callInitHooks = (hooks, options) => { - if (hooks) { - for (const hook of hooks) { - hook(options); - } - } -}; -const create = (defaults) => { - // Proxy properties from next handlers - defaults._rawHandlers = defaults.handlers; - defaults.handlers = defaults.handlers.map(fn => ((options, next) => { - // This will be assigned by assigning result - let root; - const result = fn(options, newOptions => { - root = next(newOptions); - return root; - }); - if (result !== root && !options.isStream && root) { - const typedResult = result; - const { then: promiseThen, catch: promiseCatch, finally: promiseFianlly } = typedResult; - Object.setPrototypeOf(typedResult, Object.getPrototypeOf(root)); - Object.defineProperties(typedResult, Object.getOwnPropertyDescriptors(root)); - // These should point to the new promise - // eslint-disable-next-line promise/prefer-await-to-then - typedResult.then = promiseThen; - typedResult.catch = promiseCatch; - typedResult.finally = promiseFianlly; - } - return result; - })); - // Got interface - const got = ((url, options = {}, _defaults) => { - var _a, _b; - let iteration = 0; - const iterateHandlers = (newOptions) => { - return defaults.handlers[iteration++](newOptions, iteration === defaults.handlers.length ? getPromiseOrStream : iterateHandlers); - }; - // TODO: Remove this in Got 12. - if (is_1.default.plainObject(url)) { - const mergedOptions = { - ...url, - ...options - }; - core_1.setNonEnumerableProperties([url, options], mergedOptions); - options = mergedOptions; - url = undefined; - } - try { - // Call `init` hooks - let initHookError; - try { - callInitHooks(defaults.options.hooks.init, options); - callInitHooks((_a = options.hooks) === null || _a === void 0 ? void 0 : _a.init, options); - } - catch (error) { - initHookError = error; - } - // Normalize options & call handlers - const normalizedOptions = normalizeArguments(url, options, _defaults !== null && _defaults !== void 0 ? _defaults : defaults.options); - normalizedOptions[core_1.kIsNormalizedAlready] = true; - if (initHookError) { - throw new as_promise_1.RequestError(initHookError.message, initHookError, normalizedOptions); - } - return iterateHandlers(normalizedOptions); - } - catch (error) { - if (options.isStream) { - throw error; - } - else { - return create_rejection_1.default(error, defaults.options.hooks.beforeError, (_b = options.hooks) === null || _b === void 0 ? 
void 0 : _b.beforeError); - } - } - }); - got.extend = (...instancesOrOptions) => { - const optionsArray = [defaults.options]; - let handlers = [...defaults._rawHandlers]; - let isMutableDefaults; - for (const value of instancesOrOptions) { - if (isGotInstance(value)) { - optionsArray.push(value.defaults.options); - handlers.push(...value.defaults._rawHandlers); - isMutableDefaults = value.defaults.mutableDefaults; - } - else { - optionsArray.push(value); - if ('handlers' in value) { - handlers.push(...value.handlers); - } - isMutableDefaults = value.mutableDefaults; - } - } - handlers = handlers.filter(handler => handler !== exports.defaultHandler); - if (handlers.length === 0) { - handlers.push(exports.defaultHandler); - } - return create({ - options: mergeOptions(...optionsArray), - handlers, - mutableDefaults: Boolean(isMutableDefaults) - }); - }; - // Pagination - const paginateEach = (async function* (url, options) { - // TODO: Remove this `@ts-expect-error` when upgrading to TypeScript 4. - // Error: Argument of type 'Merge> | undefined' is not assignable to parameter of type 'Options | undefined'. - // @ts-expect-error - let normalizedOptions = normalizeArguments(url, options, defaults.options); - normalizedOptions.resolveBodyOnly = false; - const pagination = normalizedOptions.pagination; - if (!is_1.default.object(pagination)) { - throw new TypeError('`options.pagination` must be implemented'); - } - const all = []; - let { countLimit } = pagination; - let numberOfRequests = 0; - while (numberOfRequests < pagination.requestLimit) { - if (numberOfRequests !== 0) { - // eslint-disable-next-line no-await-in-loop - await delay(pagination.backoff); - } - // @ts-expect-error FIXME! - // TODO: Throw when result is not an instance of Response - // eslint-disable-next-line no-await-in-loop - const result = (await got(undefined, undefined, normalizedOptions)); - // eslint-disable-next-line no-await-in-loop - const parsed = await pagination.transform(result); - const current = []; - for (const item of parsed) { - if (pagination.filter(item, all, current)) { - if (!pagination.shouldContinue(item, all, current)) { - return; - } - yield item; - if (pagination.stackAllItems) { - all.push(item); - } - current.push(item); - if (--countLimit <= 0) { - return; - } - } - } - const optionsToMerge = pagination.paginate(result, all, current); - if (optionsToMerge === false) { - return; - } - if (optionsToMerge === result.request.options) { - normalizedOptions = result.request.options; - } - else if (optionsToMerge !== undefined) { - normalizedOptions = normalizeArguments(undefined, optionsToMerge, normalizedOptions); - } - numberOfRequests++; - } - }); - got.paginate = paginateEach; - got.paginate.all = (async (url, options) => { - const results = []; - for await (const item of paginateEach(url, options)) { - results.push(item); - } - return results; - }); - // For those who like very descriptive names - got.paginate.each = paginateEach; - // Stream API - got.stream = ((url, options) => got(url, { ...options, isStream: true })); - // Shortcuts - for (const method of aliases) { - got[method] = ((url, options) => got(url, { ...options, method })); - got.stream[method] = ((url, options) => { - return got(url, { ...options, method, isStream: true }); - }); - } - Object.assign(got, errors); - Object.defineProperty(got, 'defaults', { - value: defaults.mutableDefaults ? 
defaults : deep_freeze_1.default(defaults), - writable: defaults.mutableDefaults, - configurable: defaults.mutableDefaults, - enumerable: true - }); - got.mergeOptions = mergeOptions; - return got; -}; -exports.default = create; -__exportStar(__nccwpck_require__(2613), exports); - - -/***/ }), - -/***/ 3061: -/***/ (function(module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const url_1 = __nccwpck_require__(8835); -const create_1 = __nccwpck_require__(4337); -const defaults = { - options: { - method: 'GET', - retry: { - limit: 2, - methods: [ - 'GET', - 'PUT', - 'HEAD', - 'DELETE', - 'OPTIONS', - 'TRACE' - ], - statusCodes: [ - 408, - 413, - 429, - 500, - 502, - 503, - 504, - 521, - 522, - 524 - ], - errorCodes: [ - 'ETIMEDOUT', - 'ECONNRESET', - 'EADDRINUSE', - 'ECONNREFUSED', - 'EPIPE', - 'ENOTFOUND', - 'ENETUNREACH', - 'EAI_AGAIN' - ], - maxRetryAfter: undefined, - calculateDelay: ({ computedValue }) => computedValue - }, - timeout: {}, - headers: { - 'user-agent': 'got (https://github.com/sindresorhus/got)' - }, - hooks: { - init: [], - beforeRequest: [], - beforeRedirect: [], - beforeRetry: [], - beforeError: [], - afterResponse: [] - }, - cache: undefined, - dnsCache: undefined, - decompress: true, - throwHttpErrors: true, - followRedirect: true, - isStream: false, - responseType: 'text', - resolveBodyOnly: false, - maxRedirects: 10, - prefixUrl: '', - methodRewriting: true, - ignoreInvalidCookies: false, - context: {}, - // TODO: Set this to `true` when Got 12 gets released - http2: false, - allowGetBody: false, - https: undefined, - pagination: { - transform: (response) => { - if (response.request.options.responseType === 'json') { - return response.body; - } - return JSON.parse(response.body); - }, - paginate: response => { - if (!Reflect.has(response.headers, 'link')) { - return false; - } - const items = response.headers.link.split(','); - let next; - for (const item of items) { - const parsed = item.split(';'); - if (parsed[1].includes('next')) { - next = parsed[0].trimStart().trim(); - next = next.slice(1, -1); - break; - } - } - if (next) { - const options = { - url: new url_1.URL(next) - }; - return options; - } - return false; - }, - filter: () => true, - shouldContinue: () => true, - countLimit: Infinity, - backoff: 0, - requestLimit: 10000, - stackAllItems: true - }, - parseJson: (text) => JSON.parse(text), - stringifyJson: (object) => JSON.stringify(object), - cacheOptions: {} - }, - handlers: [create_1.defaultHandler], - mutableDefaults: false -}; -const got = create_1.default(defaults); -exports.default = got; -// For CommonJS default export support -module.exports = got; -module.exports.default = got; -module.exports.__esModule = true; // Workaround for TS issue: https://github.com/sindresorhus/got/pull/1267 -__exportStar(__nccwpck_require__(4337), exports); -__exportStar(__nccwpck_require__(6056), exports); - - -/***/ }), - -/***/ 2613: -/***/ ((__unused_webpack_module, exports) => { - -"use 
strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); - - -/***/ }), - -/***/ 285: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const is_1 = __nccwpck_require__(7678); -function deepFreeze(object) { - for (const value of Object.values(object)) { - if (is_1.default.plainObject(value) || is_1.default.array(value)) { - deepFreeze(value); - } - } - return Object.freeze(object); -} -exports.default = deepFreeze; - - -/***/ }), - -/***/ 397: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const alreadyWarned = new Set(); -exports.default = (message) => { - if (alreadyWarned.has(message)) { - return; - } - alreadyWarned.add(message); - // @ts-expect-error Missing types. - process.emitWarning(`Got: ${message}`, { - type: 'DeprecationWarning' - }); -}; +const {constants: BufferConstants} = __nccwpck_require__(4300); +const stream = __nccwpck_require__(2781); +const {promisify} = __nccwpck_require__(3837); +const bufferStream = __nccwpck_require__(1585); + +const streamPipelinePromisified = promisify(stream.pipeline); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + throw new Error('Expected a stream'); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + const stream = bufferStream(options); + + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. + if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + (async () => { + try { + await streamPipelinePromisified(inputStream, stream); + resolve(); + } catch (error) { + rejectPromise(error); + } + })(); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; /***/ }), @@ -19009,49 +16657,72 @@ module.exports = class CachePolicy { "use strict"; -const EventEmitter = __nccwpck_require__(8614); -const tls = __nccwpck_require__(4016); -const http2 = __nccwpck_require__(7565); +// See https://github.com/facebook/jest/issues/2549 +// eslint-disable-next-line node/prefer-global/url +const {URL} = __nccwpck_require__(7310); +const EventEmitter = __nccwpck_require__(2361); +const tls = __nccwpck_require__(4404); +const http2 = __nccwpck_require__(5158); const QuickLRU = __nccwpck_require__(9273); +const delayAsyncDestroy = __nccwpck_require__(9237); -const kCurrentStreamsCount = Symbol('currentStreamsCount'); +const kCurrentStreamCount = Symbol('currentStreamCount'); const kRequest = Symbol('request'); const kOriginSet = Symbol('cachedOriginSet'); const kGracefullyClosing = Symbol('gracefullyClosing'); +const kLength = Symbol('length'); const nameKeys = [ + // Not an Agent option actually + 'createConnection', + // `http2.connect()` options 'maxDeflateDynamicTableSize', + 'maxSettings', 'maxSessionMemory', 
'maxHeaderListPairs', 'maxOutstandingPings', 'maxReservedRemoteStreams', 'maxSendHeaderBlockLength', 'paddingStrategy', + 'peerMaxConcurrentStreams', + 'settings', - // `tls.connect()` options + // `tls.connect()` source options + 'family', 'localAddress', - 'path', 'rejectUnauthorized', + + // `tls.connect()` secure context options + 'pskCallback', 'minDHSize', + // `tls.connect()` destination options + // - `servername` is automatically validated, skip it + // - `host` and `port` just describe the destination server, + 'path', + 'socket', + // `tls.createSecureContext()` options 'ca', 'cert', - 'clientCertEngine', + 'sigalgs', 'ciphers', - 'key', - 'pfx', - 'servername', - 'minVersion', - 'maxVersion', - 'secureProtocol', + 'clientCertEngine', 'crl', - 'honorCipherOrder', - 'ecdhCurve', 'dhparam', + 'ecdhCurve', + 'honorCipherOrder', + 'key', + 'privateKeyEngine', + 'privateKeyIdentifier', + 'maxVersion', + 'minVersion', + 'pfx', 'secureOptions', - 'sessionIdContext' + 'secureProtocol', + 'sessionIdContext', + 'ticketKeys' ]; const getSortedIndex = (array, value, compare) => { @@ -19061,9 +16732,7 @@ const getSortedIndex = (array, value, compare) => { while (low < high) { const mid = (low + high) >>> 1; - /* istanbul ignore next */ if (compare(array[mid], value)) { - // This never gets called because we use descending sort. Better to have this anyway. low = mid + 1; } else { high = mid; @@ -19073,25 +16742,28 @@ const getSortedIndex = (array, value, compare) => { return low; }; -const compareSessions = (a, b) => { - return a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams; -}; +const compareSessions = (a, b) => a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams; // See https://tools.ietf.org/html/rfc8336 const closeCoveredSessions = (where, session) => { // Clients SHOULD NOT emit new requests on any connection whose Origin // Set is a proper subset of another connection's Origin Set, and they // SHOULD close it once all outstanding requests are satisfied. - for (const coveredSession of where) { + for (let index = 0; index < where.length; index++) { + const coveredSession = where[index]; + if ( + // Unfortunately `.every()` returns true for an empty array + coveredSession[kOriginSet].length > 0 + // The set is a proper subset when its length is less than the other set. - coveredSession[kOriginSet].length < session[kOriginSet].length && + && coveredSession[kOriginSet].length < session[kOriginSet].length // And the other set includes all elements of the subset. - coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) && + && coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) // Makes sure that the session can handle all requests from the covered session. - coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams + && (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams ) { // This allows pending requests to finish and prevents making new requests. gracefullyClose(coveredSession); @@ -19101,62 +16773,45 @@ const closeCoveredSessions = (where, session) => { // This is basically inverted `closeCoveredSessions(...)`. 
const closeSessionIfCovered = (where, coveredSession) => { - for (const session of where) { + for (let index = 0; index < where.length; index++) { + const session = where[index]; + if ( - coveredSession[kOriginSet].length < session[kOriginSet].length && - coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) && - coveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams + coveredSession[kOriginSet].length > 0 + && coveredSession[kOriginSet].length < session[kOriginSet].length + && coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) + && (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams ) { gracefullyClose(coveredSession); - } - } -}; -const getSessions = ({agent, isFree}) => { - const result = {}; - - // eslint-disable-next-line guard-for-in - for (const normalizedOptions in agent.sessions) { - const sessions = agent.sessions[normalizedOptions]; - - const filtered = sessions.filter(session => { - const result = session[Agent.kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams; - - return isFree ? result : !result; - }); - - if (filtered.length !== 0) { - result[normalizedOptions] = filtered; + return true; } } - return result; + return false; }; const gracefullyClose = session => { session[kGracefullyClosing] = true; - if (session[kCurrentStreamsCount] === 0) { + if (session[kCurrentStreamCount] === 0) { session.close(); } }; class Agent extends EventEmitter { - constructor({timeout = 60000, maxSessions = Infinity, maxFreeSessions = 10, maxCachedTlsSessions = 100} = {}) { + constructor({timeout = 0, maxSessions = Number.POSITIVE_INFINITY, maxEmptySessions = 10, maxCachedTlsSessions = 100} = {}) { super(); - // A session is considered busy when its current streams count - // is equal to or greater than the `maxConcurrentStreams` value. - - // A session is considered free when its current streams count - // is less than the `maxConcurrentStreams` value. - // SESSIONS[NORMALIZED_OPTIONS] = []; this.sessions = {}; // The queue for creating new sessions. It looks like this: // QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION // + // It's faster when there are many origins. If there's only one, then QUEUE[`${options}:${origin}`] is faster. + // I guess object creation / deletion is causing the slowdown. + // // The entry function has `listeners`, `completed` and `destroyed` properties. // `listeners` is an array of objects containing `resolve` and `reject` functions. // `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed. @@ -19169,69 +16824,90 @@ class Agent extends EventEmitter { // Max sessions in total this.maxSessions = maxSessions; - // Max free sessions in total - // TODO: decreasing `maxFreeSessions` should close some sessions - this.maxFreeSessions = maxFreeSessions; + // Max empty sessions in total + this.maxEmptySessions = maxEmptySessions; - this._freeSessionsCount = 0; - this._sessionsCount = 0; + this._emptySessionCount = 0; + this._sessionCount = 0; // We don't support push streams by default. this.settings = { - enablePush: false + enablePush: false, + initialWindowSize: 1024 * 1024 * 32 // 32MB, see https://github.com/nodejs/node/issues/38426 }; // Reusing TLS sessions increases performance. 
this.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions}); } - static normalizeOrigin(url, servername) { - if (typeof url === 'string') { - url = new URL(url); - } - - if (servername && url.hostname !== servername) { - url.hostname = servername; - } - - return url.origin; + get protocol() { + return 'https:'; } normalizeOptions(options) { let normalized = ''; - if (options) { - for (const key of nameKeys) { - if (options[key]) { - normalized += `:${options[key]}`; - } + for (let index = 0; index < nameKeys.length; index++) { + const key = nameKeys[index]; + + normalized += ':'; + + if (options && options[key] !== undefined) { + normalized += options[key]; } } return normalized; } - _tryToCreateNewSession(normalizedOptions, normalizedOrigin) { - if (!(normalizedOptions in this.queue) || !(normalizedOrigin in this.queue[normalizedOptions])) { + _processQueue() { + if (this._sessionCount >= this.maxSessions) { + this.closeEmptySessions(this.maxSessions - this._sessionCount + 1); return; } - const item = this.queue[normalizedOptions][normalizedOrigin]; + // eslint-disable-next-line guard-for-in + for (const normalizedOptions in this.queue) { + // eslint-disable-next-line guard-for-in + for (const normalizedOrigin in this.queue[normalizedOptions]) { + const item = this.queue[normalizedOptions][normalizedOrigin]; - // The entry function can be run only once. - // BUG: The session may be never created when: - // - the first condition is false AND - // - this function is never called with the same arguments in the future. - if (this._sessionsCount < this.maxSessions && !item.completed) { - item.completed = true; + // The entry function can be run only once. + if (!item.completed) { + item.completed = true; - item(); + item(); + } + } + } + } + + _isBetterSession(thisStreamCount, thatStreamCount) { + return thisStreamCount > thatStreamCount; + } + + _accept(session, listeners, normalizedOrigin, options) { + let index = 0; + + while (index < listeners.length && session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams) { + // We assume `resolve(...)` calls `request(...)` *directly*, + // otherwise the session will get overloaded. + listeners[index].resolve(session); + + index++; + } + + listeners.splice(0, index); + + if (listeners.length > 0) { + this.getSession(normalizedOrigin, options, listeners); + listeners.length = 0; } } getSession(origin, options, listeners) { return new Promise((resolve, reject) => { - if (Array.isArray(listeners)) { + if (Array.isArray(listeners) && listeners.length > 0) { listeners = [...listeners]; // Resolve the current promise ASAP, we're just moving the listeners. 
@@ -19241,17 +16917,33 @@ class Agent extends EventEmitter { listeners = [{resolve, reject}]; } - const normalizedOptions = this.normalizeOptions(options); - const normalizedOrigin = Agent.normalizeOrigin(origin, options && options.servername); + try { + // Parse origin + if (typeof origin === 'string') { + origin = new URL(origin); + } else if (!(origin instanceof URL)) { + throw new TypeError('The `origin` argument needs to be a string or an URL object'); + } - if (normalizedOrigin === undefined) { - for (const {reject} of listeners) { - reject(new TypeError('The `origin` argument needs to be a string or an URL object')); + if (options) { + // Validate servername + const {servername} = options; + const {hostname} = origin; + if (servername && hostname !== servername) { + throw new Error(`Origin ${hostname} differs from servername ${servername}`); + } + } + } catch (error) { + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); } return; } + const normalizedOptions = this.normalizeOptions(options); + const normalizedOrigin = origin.origin; + if (normalizedOptions in this.sessions) { const sessions = this.sessions[normalizedOptions]; @@ -19261,56 +16953,55 @@ class Agent extends EventEmitter { // We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal. // Additionally, we are looking for session which has biggest current pending streams count. - for (const session of sessions) { + // + // |------------| |------------| |------------| |------------| + // | Session: A | | Session: B | | Session: C | | Session: D | + // | Pending: 5 |-| Pending: 8 |-| Pending: 9 |-| Pending: 4 | + // | Max: 10 | | Max: 10 | | Max: 9 | | Max: 5 | + // |------------| |------------| |------------| |------------| + // ^ + // | + // pick this one -- + // + for (let index = 0; index < sessions.length; index++) { + const session = sessions[index]; + const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams; if (sessionMaxConcurrentStreams < maxConcurrentStreams) { break; } - if (session[kOriginSet].includes(normalizedOrigin)) { - const sessionCurrentStreamsCount = session[kCurrentStreamsCount]; + if (!session[kOriginSet].includes(normalizedOrigin)) { + continue; + } - if ( - sessionCurrentStreamsCount >= sessionMaxConcurrentStreams || - session[kGracefullyClosing] || - // Unfortunately the `close` event isn't called immediately, - // so `session.destroyed` is `true`, but `session.closed` is `false`. - session.destroyed - ) { - continue; - } + const sessionCurrentStreamsCount = session[kCurrentStreamCount]; - // We only need set this once. - if (!optimalSession) { - maxConcurrentStreams = sessionMaxConcurrentStreams; - } + if ( + sessionCurrentStreamsCount >= sessionMaxConcurrentStreams + || session[kGracefullyClosing] + // Unfortunately the `close` event isn't called immediately, + // so `session.destroyed` is `true`, but `session.closed` is `false`. + || session.destroyed + ) { + continue; + } - // We're looking for the session which has biggest current pending stream count, - // in order to minimalize the amount of active sessions. - if (sessionCurrentStreamsCount > currentStreamsCount) { - optimalSession = session; - currentStreamsCount = sessionCurrentStreamsCount; - } + // We only need set this once. + if (!optimalSession) { + maxConcurrentStreams = sessionMaxConcurrentStreams; + } + + // Either get the session which has biggest current stream count or the lowest. 
+ if (this._isBetterSession(sessionCurrentStreamsCount, currentStreamsCount)) { + optimalSession = session; + currentStreamsCount = sessionCurrentStreamsCount; } } if (optimalSession) { - /* istanbul ignore next: safety check */ - if (listeners.length !== 1) { - for (const {reject} of listeners) { - const error = new Error( - `Expected the length of listeners to be 1, got ${listeners.length}.\n` + - 'Please report this to https://github.com/szmarczak/http2-wrapper/' - ); - - reject(error); - } - - return; - } - - listeners[0].resolve(optimalSession); + this._accept(optimalSession, listeners, normalizedOrigin, options); return; } } @@ -19319,14 +17010,12 @@ class Agent extends EventEmitter { if (normalizedOrigin in this.queue[normalizedOptions]) { // There's already an item in the queue, just attach ourselves to it. this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners); - - // This shouldn't be executed here. - // See the comment inside _tryToCreateNewSession. - this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); return; } } else { - this.queue[normalizedOptions] = {}; + this.queue[normalizedOptions] = { + [kLength]: 0 + }; } // The entry must be removed from the queue IMMEDIATELY when: @@ -19337,29 +17026,58 @@ class Agent extends EventEmitter { if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) { delete this.queue[normalizedOptions][normalizedOrigin]; - if (Object.keys(this.queue[normalizedOptions]).length === 0) { + if (--this.queue[normalizedOptions][kLength] === 0) { delete this.queue[normalizedOptions]; } } }; // The main logic is here - const entry = () => { + const entry = async () => { + this._sessionCount++; + const name = `${normalizedOrigin}:${normalizedOptions}`; let receivedSettings = false; + let socket; try { - const session = http2.connect(origin, { - createConnection: this.createConnection, - settings: this.settings, - session: this.tlsSessionCache.get(name), - ...options - }); - session[kCurrentStreamsCount] = 0; + const computedOptions = {...options}; + + if (computedOptions.settings === undefined) { + computedOptions.settings = this.settings; + } + + if (computedOptions.session === undefined) { + computedOptions.session = this.tlsSessionCache.get(name); + } + + const createConnection = computedOptions.createConnection || this.createConnection; + + // A hacky workaround to enable async `createConnection` + socket = await createConnection.call(this, origin, computedOptions); + computedOptions.createConnection = () => socket; + + const session = http2.connect(origin, computedOptions); + session[kCurrentStreamCount] = 0; session[kGracefullyClosing] = false; - const isFree = () => session[kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams; - let wasFree = true; + // Node.js return https://false:443 instead of https://1.1.1.1:443 + const getOriginSet = () => { + const {socket} = session; + + let originSet; + if (socket.servername === false) { + socket.servername = socket.remoteAddress; + originSet = session.originSet; + socket.servername = false; + } else { + originSet = session.originSet; + } + + return originSet; + }; + + const isFree = () => session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams; session.socket.once('session', tlsSession => { this.tlsSessionCache.set(name, tlsSession); @@ -19367,8 +17085,8 @@ class Agent extends EventEmitter { session.once('error', error => { // Listeners are empty when the session successfully connected. 
- for (const {reject} of listeners) { - reject(error); + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); } // The connection got broken, purge the cache. @@ -19377,68 +17095,73 @@ class Agent extends EventEmitter { session.setTimeout(this.timeout, () => { // Terminates all streams owned by this session. - // TODO: Maybe the streams should have a "Session timed out" error? session.destroy(); }); session.once('close', () => { - if (receivedSettings) { - // 1. If it wasn't free then no need to decrease because - // it has been decreased already in session.request(). - // 2. `stream.once('close')` won't increment the count - // because the session is already closed. - if (wasFree) { - this._freeSessionsCount--; - } + this._sessionCount--; - this._sessionsCount--; + if (receivedSettings) { + // Assumes session `close` is emitted after request `close` + this._emptySessionCount--; // This cannot be moved to the stream logic, // because there may be a session that hadn't made a single request. const where = this.sessions[normalizedOptions]; - where.splice(where.indexOf(session), 1); - if (where.length === 0) { + if (where.length === 1) { delete this.sessions[normalizedOptions]; + } else { + where.splice(where.indexOf(session), 1); } } else { // Broken connection + removeFromQueue(); + const error = new Error('Session closed without receiving a SETTINGS frame'); error.code = 'HTTP2WRAPPER_NOSETTINGS'; - for (const {reject} of listeners) { - reject(error); + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); } - - removeFromQueue(); } // There may be another session awaiting. - this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); + this._processQueue(); }); // Iterates over the queue and processes listeners. const processListeners = () => { - if (!(normalizedOptions in this.queue) || !isFree()) { + const queue = this.queue[normalizedOptions]; + if (!queue) { return; } - for (const origin of session[kOriginSet]) { - if (origin in this.queue[normalizedOptions]) { - const {listeners} = this.queue[normalizedOptions][origin]; + const originSet = session[kOriginSet]; + + for (let index = 0; index < originSet.length; index++) { + const origin = originSet[index]; + + if (origin in queue) { + const {listeners, completed} = queue[origin]; + + let index = 0; // Prevents session overloading. - while (listeners.length !== 0 && isFree()) { + while (index < listeners.length && isFree()) { // We assume `resolve(...)` calls `request(...)` *directly*, // otherwise the session will get overloaded. - listeners.shift().resolve(session); + listeners[index].resolve(session); + + index++; } - const where = this.queue[normalizedOptions]; - if (where[origin].listeners.length === 0) { - delete where[origin]; + queue[origin].listeners.splice(0, index); - if (Object.keys(where).length === 0) { + if (queue[origin].listeners.length === 0 && !completed) { + delete queue[origin]; + + if (--queue[kLength] === 0) { delete this.queue[normalizedOptions]; break; } @@ -19454,39 +17177,59 @@ class Agent extends EventEmitter { // The Origin Set cannot shrink. No need to check if it suddenly became covered by another one. session.on('origin', () => { - session[kOriginSet] = session.originSet; + session[kOriginSet] = getOriginSet() || []; + session[kGracefullyClosing] = false; + closeSessionIfCovered(this.sessions[normalizedOptions], session); - if (!isFree()) { - // The session is full. 
+ if (session[kGracefullyClosing] || !isFree()) { return; } processListeners(); + if (!isFree()) { + return; + } + // Close covered sessions (if possible). closeCoveredSessions(this.sessions[normalizedOptions], session); }); session.once('remoteSettings', () => { - // Fix Node.js bug preventing the process from exiting - session.ref(); - session.unref(); - - this._sessionsCount++; - // The Agent could have been destroyed already. if (entry.destroyed) { const error = new Error('Agent has been destroyed'); - for (const listener of listeners) { - listener.reject(error); + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); } session.destroy(); return; } - session[kOriginSet] = session.originSet; + // See https://github.com/nodejs/node/issues/38426 + if (session.setLocalWindowSize) { + session.setLocalWindowSize(1024 * 1024 * 4); // 4 MB + } + + session[kOriginSet] = getOriginSet() || []; + + if (session.socket.encrypted) { + const mainOrigin = session[kOriginSet][0]; + if (mainOrigin !== normalizedOrigin) { + const error = new Error(`Requested origin ${normalizedOrigin} does not match server ${mainOrigin}`); + + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); + } + + session.destroy(); + return; + } + } + + removeFromQueue(); { const where = this.sessions; @@ -19499,30 +17242,28 @@ class Agent extends EventEmitter { } } - this._freeSessionsCount += 1; receivedSettings = true; + this._emptySessionCount++; this.emit('session', session); + this._accept(session, listeners, normalizedOrigin, options); - processListeners(); - removeFromQueue(); - - // TODO: Close last recently used (or least used?) session - if (session[kCurrentStreamsCount] === 0 && this._freeSessionsCount > this.maxFreeSessions) { - session.close(); - } - - // Check if we haven't managed to execute all listeners. - if (listeners.length !== 0) { - // Request for a new session with predefined listeners. - this.getSession(normalizedOrigin, options, listeners); - listeners.length = 0; + if (session[kCurrentStreamCount] === 0 && this._emptySessionCount > this.maxEmptySessions) { + this.closeEmptySessions(this._emptySessionCount - this.maxEmptySessions); } // `session.remoteSettings.maxConcurrentStreams` might get increased session.on('remoteSettings', () => { + if (!isFree()) { + return; + } + processListeners(); + if (!isFree()) { + return; + } + // In case the Origin Set changes closeCoveredSessions(this.sessions[normalizedOptions], session); }); @@ -19540,45 +17281,31 @@ class Agent extends EventEmitter { // The process won't exit until the session is closed or all requests are gone. 
session.ref(); - ++session[kCurrentStreamsCount]; - - if (session[kCurrentStreamsCount] === session.remoteSettings.maxConcurrentStreams) { - this._freeSessionsCount--; + if (session[kCurrentStreamCount]++ === 0) { + this._emptySessionCount--; } stream.once('close', () => { - wasFree = isFree(); + if (--session[kCurrentStreamCount] === 0) { + this._emptySessionCount++; + session.unref(); - --session[kCurrentStreamsCount]; + if (this._emptySessionCount > this.maxEmptySessions || session[kGracefullyClosing]) { + session.close(); + return; + } + } - if (!session.destroyed && !session.closed) { - closeSessionIfCovered(this.sessions[normalizedOptions], session); + if (session.destroyed || session.closed) { + return; + } - if (isFree() && !session.closed) { - if (!wasFree) { - this._freeSessionsCount++; + if (isFree() && !closeSessionIfCovered(this.sessions[normalizedOptions], session)) { + closeCoveredSessions(this.sessions[normalizedOptions], session); + processListeners(); - wasFree = true; - } - - const isEmpty = session[kCurrentStreamsCount] === 0; - - if (isEmpty) { - session.unref(); - } - - if ( - isEmpty && - ( - this._freeSessionsCount > this.maxFreeSessions || - session[kGracefullyClosing] - ) - ) { - session.close(); - } else { - closeCoveredSessions(this.sessions[normalizedOptions], session); - processListeners(); - } + if (session[kCurrentStreamCount] === 0) { + this._processQueue(); } } }); @@ -19586,11 +17313,12 @@ class Agent extends EventEmitter { return stream; }; } catch (error) { - for (const listener of listeners) { - listener.reject(error); - } - removeFromQueue(); + this._sessionCount--; + + for (let index = 0; index < listeners.length; index++) { + listeners[index].reject(error); + } } }; @@ -19599,7 +17327,8 @@ class Agent extends EventEmitter { entry.destroyed = false; this.queue[normalizedOptions][normalizedOrigin] = entry; - this._tryToCreateNewSession(normalizedOptions, normalizedOrigin); + this.queue[normalizedOptions][kLength]++; + this._processQueue(); }); } @@ -19609,7 +17338,12 @@ class Agent extends EventEmitter { reject, resolve: session => { try { - resolve(session.request(headers, streamOptions)); + const stream = session.request(headers, streamOptions); + + // Do not throw before `request(...)` has been awaited + delayAsyncDestroy(stream); + + resolve(stream); } catch (error) { reject(error); } @@ -19618,7 +17352,7 @@ class Agent extends EventEmitter { }); } - createConnection(origin, options) { + async createConnection(origin, options) { return Agent.connect(origin, options); } @@ -19626,52 +17360,92 @@ class Agent extends EventEmitter { options.ALPNProtocols = ['h2']; const port = origin.port || 443; - const host = origin.hostname || origin.host; + const host = origin.hostname; if (typeof options.servername === 'undefined') { options.servername = host; } - return tls.connect(port, host, options); + const socket = tls.connect(port, host, options); + + if (options.socket) { + socket._peername = { + family: undefined, + address: undefined, + port + }; + } + + return socket; } - closeFreeSessions() { - for (const sessions of Object.values(this.sessions)) { - for (const session of sessions) { - if (session[kCurrentStreamsCount] === 0) { + closeEmptySessions(maxCount = Number.POSITIVE_INFINITY) { + let closedCount = 0; + + const {sessions} = this; + + // eslint-disable-next-line guard-for-in + for (const key in sessions) { + const thisSessions = sessions[key]; + + for (let index = 0; index < thisSessions.length; index++) { + const session = thisSessions[index]; 
+ + if (session[kCurrentStreamCount] === 0) { + closedCount++; session.close(); + + if (closedCount >= maxCount) { + return closedCount; + } } } } + + return closedCount; } destroy(reason) { - for (const sessions of Object.values(this.sessions)) { - for (const session of sessions) { - session.destroy(reason); + const {sessions, queue} = this; + + // eslint-disable-next-line guard-for-in + for (const key in sessions) { + const thisSessions = sessions[key]; + + for (let index = 0; index < thisSessions.length; index++) { + thisSessions[index].destroy(reason); } } - for (const entriesOfAuthority of Object.values(this.queue)) { - for (const entry of Object.values(entriesOfAuthority)) { - entry.destroyed = true; + // eslint-disable-next-line guard-for-in + for (const normalizedOptions in queue) { + const entries = queue[normalizedOptions]; + + // eslint-disable-next-line guard-for-in + for (const normalizedOrigin in entries) { + entries[normalizedOrigin].destroyed = true; } } // New requests should NOT attach to destroyed sessions this.queue = {}; + this.tlsSessionCache.clear(); } - get freeSessions() { - return getSessions({agent: this, isFree: true}); + get emptySessionCount() { + return this._emptySessionCount; } - get busySessions() { - return getSessions({agent: this, isFree: false}); + get pendingSessionCount() { + return this._sessionCount - this._emptySessionCount; + } + + get sessionCount() { + return this._sessionCount; } } -Agent.kCurrentStreamsCount = kCurrentStreamsCount; +Agent.kCurrentStreamCount = kCurrentStreamCount; Agent.kGracefullyClosing = kGracefullyClosing; module.exports = { @@ -19687,13 +17461,17 @@ module.exports = { "use strict"; -const http = __nccwpck_require__(8605); -const https = __nccwpck_require__(7211); +// See https://github.com/facebook/jest/issues/2549 +// eslint-disable-next-line node/prefer-global/url +const {URL, urlToHttpOptions} = __nccwpck_require__(7310); +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); const resolveALPN = __nccwpck_require__(6624); const QuickLRU = __nccwpck_require__(9273); +const {Agent, globalAgent} = __nccwpck_require__(9898); const Http2ClientRequest = __nccwpck_require__(9632); const calculateServerName = __nccwpck_require__(1982); -const urlToOptions = __nccwpck_require__(2686); +const delayAsyncDestroy = __nccwpck_require__(9237); const cache = new QuickLRU({maxSize: 100}); const queue = new Map(); @@ -19713,10 +17491,24 @@ const installSocket = (agent, socket, options) => { socket.on('close', onClose); + const onTimeout = () => { + const {freeSockets} = agent; + + for (const sockets of Object.values(freeSockets)) { + if (sockets.includes(socket)) { + socket.destroy(); + return; + } + } + }; + + socket.on('timeout', onTimeout); + const onRemove = () => { agent.removeSocket(socket, options); socket.off('close', onClose); socket.off('free', onFree); + socket.off('timeout', onTimeout); socket.off('agentRemove', onRemove); }; @@ -19725,76 +17517,65 @@ const installSocket = (agent, socket, options) => { agent.emit('free', socket, options); }; -const resolveProtocol = async options => { - const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`; +const createResolveProtocol = (cache, queue = new Map(), connect = undefined) => { + return async options => { + const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`; - if (!cache.has(name)) { - if (queue.has(name)) { - const result = await queue.get(name); - return result.alpnProtocol; - } - - const {path, 
agent} = options; - options.path = options.socketPath; - - const resultPromise = resolveALPN(options); - queue.set(name, resultPromise); - - try { - const {socket, alpnProtocol} = await resultPromise; - cache.set(name, alpnProtocol); - - options.path = path; - - if (alpnProtocol === 'h2') { - // https://github.com/nodejs/node/issues/33343 - socket.destroy(); - } else { - const {globalAgent} = https; - const defaultCreateConnection = https.Agent.prototype.createConnection; - - if (agent) { - if (agent.createConnection === defaultCreateConnection) { - installSocket(agent, socket, options); - } else { - socket.destroy(); - } - } else if (globalAgent.createConnection === defaultCreateConnection) { - installSocket(globalAgent, socket, options); - } else { - socket.destroy(); - } + if (!cache.has(name)) { + if (queue.has(name)) { + const result = await queue.get(name); + return {alpnProtocol: result.alpnProtocol}; } - queue.delete(name); + const {path} = options; + options.path = options.socketPath; - return alpnProtocol; - } catch (error) { - queue.delete(name); + const resultPromise = resolveALPN(options, connect); + queue.set(name, resultPromise); - throw error; + try { + const result = await resultPromise; + + cache.set(name, result.alpnProtocol); + queue.delete(name); + + options.path = path; + + return result; + } catch (error) { + queue.delete(name); + + options.path = path; + + throw error; + } } - } - return cache.get(name); + return {alpnProtocol: cache.get(name)}; + }; }; +const defaultResolveProtocol = createResolveProtocol(cache, queue); + module.exports = async (input, options, callback) => { - if (typeof input === 'string' || input instanceof URL) { - input = urlToOptions(new URL(input)); + if (typeof input === 'string') { + input = urlToHttpOptions(new URL(input)); + } else if (input instanceof URL) { + input = urlToHttpOptions(input); + } else { + input = {...input}; } - if (typeof options === 'function') { + if (typeof options === 'function' || options === undefined) { + // (options, callback) callback = options; - options = undefined; + options = input; + } else { + // (input, options, callback) + options = Object.assign(input, options); } - options = { - ALPNProtocols: ['h2', 'http/1.1'], - ...input, - ...options, - resolveSocket: true - }; + options.ALPNProtocols = options.ALPNProtocols || ['h2', 'http/1.1']; if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) { throw new Error('The `ALPNProtocols` option must be an Array with at least one entry'); @@ -19805,36 +17586,86 @@ module.exports = async (input, options, callback) => { options.host = options.hostname || options.host || 'localhost'; options.session = options.tlsSession; - options.servername = options.servername || calculateServerName(options); + options.servername = options.servername || calculateServerName((options.headers && options.headers.host) || options.host); options.port = options.port || (isHttps ? 443 : 80); options._defaultAgent = isHttps ? https.globalAgent : http.globalAgent; - const agents = options.agent; + const resolveProtocol = options.resolveProtocol || defaultResolveProtocol; - if (agents) { - if (agents.addRequest) { - throw new Error('The `options.agent` object can contain only `http`, `https` or `http2` properties'); - } + // Note: We don't support `h2session` here - options.agent = agents[isHttps ? 
'https' : 'http']; + let {agent} = options; + if (agent !== undefined && agent !== false && agent.constructor.name !== 'Object') { + throw new Error('The `options.agent` can be only an object `http`, `https` or `http2` properties'); } if (isHttps) { - const protocol = await resolveProtocol(options); + options.resolveSocket = true; - if (protocol === 'h2') { - if (agents) { - options.agent = agents.http2; + let {socket, alpnProtocol, timeout} = await resolveProtocol(options); + + if (timeout) { + if (socket) { + socket.destroy(); } - return new Http2ClientRequest(options, callback); + const error = new Error(`Timed out resolving ALPN: ${options.timeout} ms`); + error.code = 'ETIMEDOUT'; + error.ms = options.timeout; + + throw error; } + + // We can't accept custom `createConnection` because the API is different for HTTP/2 + if (socket && options.createConnection) { + socket.destroy(); + socket = undefined; + } + + delete options.resolveSocket; + + const isHttp2 = alpnProtocol === 'h2'; + + if (agent) { + agent = isHttp2 ? agent.http2 : agent.https; + options.agent = agent; + } + + if (agent === undefined) { + agent = isHttp2 ? globalAgent : https.globalAgent; + } + + if (socket) { + if (agent === false) { + socket.destroy(); + } else { + const defaultCreateConnection = (isHttp2 ? Agent : https.Agent).prototype.createConnection; + + if (agent.createConnection === defaultCreateConnection) { + if (isHttp2) { + options._reuseSocket = socket; + } else { + installSocket(agent, socket, options); + } + } else { + socket.destroy(); + } + } + } + + if (isHttp2) { + return delayAsyncDestroy(new Http2ClientRequest(options, callback)); + } + } else if (agent) { + options.agent = agent.http; } - return http.request(options, callback); + return delayAsyncDestroy(http.request(options, callback)); }; module.exports.protocolCache = cache; +module.exports.resolveProtocol = defaultResolveProtocol; +module.exports.createResolveProtocol = createResolveProtocol; /***/ }), @@ -19844,26 +17675,28 @@ module.exports.protocolCache = cache; "use strict"; -const http2 = __nccwpck_require__(7565); -const {Writable} = __nccwpck_require__(2413); +// See https://github.com/facebook/jest/issues/2549 +// eslint-disable-next-line node/prefer-global/url +const {URL, urlToHttpOptions} = __nccwpck_require__(7310); +const http2 = __nccwpck_require__(5158); +const {Writable} = __nccwpck_require__(2781); const {Agent, globalAgent} = __nccwpck_require__(9898); const IncomingMessage = __nccwpck_require__(2575); -const urlToOptions = __nccwpck_require__(2686); const proxyEvents = __nccwpck_require__(1818); -const isRequestPseudoHeader = __nccwpck_require__(1199); const { ERR_INVALID_ARG_TYPE, ERR_INVALID_PROTOCOL, - ERR_HTTP_HEADERS_SENT, - ERR_INVALID_HTTP_TOKEN, - ERR_HTTP_INVALID_HEADER_VALUE, - ERR_INVALID_CHAR + ERR_HTTP_HEADERS_SENT } = __nccwpck_require__(7087); +const validateHeaderName = __nccwpck_require__(4592); +const validateHeaderValue = __nccwpck_require__(3549); +const proxySocketHandler = __nccwpck_require__(9404); const { HTTP2_HEADER_STATUS, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, + HTTP2_HEADER_AUTHORITY, HTTP2_METHOD_CONNECT } = http2.constants; @@ -19873,59 +17706,66 @@ const kSession = Symbol('session'); const kOptions = Symbol('options'); const kFlushedHeaders = Symbol('flushedHeaders'); const kJobs = Symbol('jobs'); - -const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/; -const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/; +const kPendingAgentPromise = Symbol('pendingAgentPromise'); class 
ClientRequest extends Writable { constructor(input, options, callback) { super({ - autoDestroy: false + autoDestroy: false, + emitClose: false }); - const hasInput = typeof input === 'string' || input instanceof URL; - if (hasInput) { - input = urlToOptions(input instanceof URL ? input : new URL(input)); + if (typeof input === 'string') { + input = urlToHttpOptions(new URL(input)); + } else if (input instanceof URL) { + input = urlToHttpOptions(input); + } else { + input = {...input}; } if (typeof options === 'function' || options === undefined) { // (options, callback) callback = options; - options = hasInput ? input : {...input}; + options = input; } else { // (input, options, callback) - options = {...input, ...options}; + options = Object.assign(input, options); } if (options.h2session) { this[kSession] = options.h2session; - } else if (options.agent === false) { - this.agent = new Agent({maxFreeSessions: 0}); - } else if (typeof options.agent === 'undefined' || options.agent === null) { - if (typeof options.createConnection === 'function') { - // This is a workaround - we don't have to create the session on our own. - this.agent = new Agent({maxFreeSessions: 0}); - this.agent.createConnection = options.createConnection; - } else { - this.agent = globalAgent; + + if (this[kSession].destroyed) { + throw new Error('The session has been closed already'); } + + this.protocol = this[kSession].socket.encrypted ? 'https:' : 'http:'; + } else if (options.agent === false) { + this.agent = new Agent({maxEmptySessions: 0}); + } else if (typeof options.agent === 'undefined' || options.agent === null) { + this.agent = globalAgent; } else if (typeof options.agent.request === 'function') { this.agent = options.agent; } else { - throw new ERR_INVALID_ARG_TYPE('options.agent', ['Agent-like Object', 'undefined', 'false'], options.agent); + throw new ERR_INVALID_ARG_TYPE('options.agent', ['http2wrapper.Agent-like Object', 'undefined', 'false'], options.agent); } - if (options.protocol && options.protocol !== 'https:') { - throw new ERR_INVALID_PROTOCOL(options.protocol, 'https:'); + if (this.agent) { + this.protocol = this.agent.protocol; } - const port = options.port || options.defaultPort || (this.agent && this.agent.defaultPort) || 443; - const host = options.hostname || options.host || 'localhost'; + if (options.protocol && options.protocol !== this.protocol) { + throw new ERR_INVALID_PROTOCOL(options.protocol, this.protocol); + } - // Don't enforce the origin via options. It may be changed in an Agent. 
+ if (!options.port) { + options.port = options.defaultPort || (this.agent && this.agent.defaultPort) || 443; + } + + options.host = options.hostname || options.host || 'localhost'; + + // Unused delete options.hostname; - delete options.host; - delete options.port; const {timeout} = options; options.timeout = undefined; @@ -19933,19 +17773,26 @@ class ClientRequest extends Writable { this[kHeaders] = Object.create(null); this[kJobs] = []; + this[kPendingAgentPromise] = undefined; + this.socket = null; this.connection = null; this.method = options.method || 'GET'; - this.path = options.path; + + if (!(this.method === 'CONNECT' && (options.path === '/' || options.path === undefined))) { + this.path = options.path; + } this.res = null; this.aborted = false; this.reusedSocket = false; - if (options.headers) { - for (const [header, value] of Object.entries(options.headers)) { - this.setHeader(header, value); + const {headers} = options; + if (headers) { + // eslint-disable-next-line guard-for-in + for (const header in headers) { + this.setHeader(header, headers[header]); } } @@ -19959,18 +17806,21 @@ class ClientRequest extends Writable { this[kOptions] = options; // Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field. - if (port === 443) { - this[kOrigin] = `https://${host}`; + this[kOrigin] = new URL(`${this.protocol}//${options.servername || options.host}:${options.port}`); - if (!(':authority' in this[kHeaders])) { - this[kHeaders][':authority'] = host; - } - } else { - this[kOrigin] = `https://${host}:${port}`; + // A socket is being reused + const reuseSocket = options._reuseSocket; + if (reuseSocket) { + options.createConnection = (...args) => { + if (reuseSocket.destroyed) { + return this.agent.createConnection(...args); + } - if (!(':authority' in this[kHeaders])) { - this[kHeaders][':authority'] = `${host}:${port}`; - } + return reuseSocket; + }; + + // eslint-disable-next-line promise/prefer-await-to-then + this.agent.getSession(this[kOrigin], this[kOptions]).catch(() => {}); } if (timeout) { @@ -19995,15 +17845,27 @@ class ClientRequest extends Writable { } get path() { - return this[kHeaders][HTTP2_HEADER_PATH]; + const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH; + + return this[kHeaders][header]; } set path(value) { if (value) { - this[kHeaders][HTTP2_HEADER_PATH] = value; + const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH; + + this[kHeaders][header] = value; } } + get host() { + return this[kOrigin].hostname; + } + + set host(_value) { + // Do nothing as this is read only. 
+ } + get _mustNotHaveABody() { return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE'; } @@ -20027,15 +17889,11 @@ class ClientRequest extends Writable { } _final(callback) { - if (this.destroyed) { - return; - } - this.flushHeaders(); const callEnd = () => { - // For GET, HEAD and DELETE - if (this._mustNotHaveABody) { + // For GET, HEAD and DELETE and CONNECT + if (this._mustNotHaveABody || this.method === 'CONNECT') { callback(); return; } @@ -20064,13 +17922,25 @@ class ClientRequest extends Writable { this.destroy(); } - _destroy(error, callback) { + async _destroy(error, callback) { if (this.res) { this.res._dump(); } if (this._request) { this._request.destroy(); + } else { + process.nextTick(() => { + this.emit('close'); + }); + } + + try { + await this[kPendingAgentPromise]; + } catch (internalError) { + if (this.aborted) { + error = internalError; + } } callback(error); @@ -20096,46 +17966,45 @@ class ClientRequest extends Writable { // Forwards `timeout`, `continue`, `close` and `error` events to this instance. if (!isConnectMethod) { - proxyEvents(stream, this, ['timeout', 'continue', 'close', 'error']); + // TODO: Should we proxy `close` here? + proxyEvents(stream, this, ['timeout', 'continue']); } - // Wait for the `finish` event. We don't want to emit the `response` event - // before `request.end()` is called. - const waitForEnd = fn => { - return (...args) => { - if (!this.writable && !this.destroyed) { - fn(...args); - } else { - this.once('finish', () => { - fn(...args); - }); - } - }; - }; + stream.once('error', error => { + this.destroy(error); + }); - // This event tells we are ready to listen for the data. - stream.once('response', waitForEnd((headers, flags, rawHeaders) => { + stream.once('aborted', () => { + const {res} = this; + if (res) { + res.aborted = true; + res.emit('aborted'); + res.destroy(); + } else { + this.destroy(new Error('The server aborted the HTTP/2 stream')); + } + }); + + const onResponse = (headers, flags, rawHeaders) => { // If we were to emit raw request stream, it would be as fast as the native approach. // Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it). const response = new IncomingMessage(this.socket, stream.readableHighWaterMark); this.res = response; + // Undocumented, but it is used by `cacheable-request` + response.url = `${this[kOrigin].origin}${this.path}`; + response.req = this; response.statusCode = headers[HTTP2_HEADER_STATUS]; response.headers = headers; response.rawHeaders = rawHeaders; response.once('end', () => { - if (this.aborted) { - response.aborted = true; - response.emit('aborted'); - } else { - response.complete = true; + response.complete = true; - // Has no effect, just be consistent with the Node.js behavior - response.socket = null; - response.connection = null; - } + // Has no effect, just be consistent with the Node.js behavior + response.socket = null; + response.connection = null; }); if (isConnectMethod) { @@ -20158,7 +18027,9 @@ class ClientRequest extends Writable { }); stream.once('end', () => { - response.push(null); + if (!this.aborted) { + response.push(null); + } }); if (!this.emit('response', response)) { @@ -20166,24 +18037,64 @@ class ClientRequest extends Writable { response._dump(); } } - })); + }; + + // This event tells we are ready to listen for the data. 
+ stream.once('response', onResponse); // Emits `information` event - stream.once('headers', waitForEnd( - headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]}) - )); + stream.once('headers', headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]})); - stream.once('trailers', waitForEnd((trailers, flags, rawTrailers) => { + stream.once('trailers', (trailers, flags, rawTrailers) => { const {res} = this; + // https://github.com/nodejs/node/issues/41251 + if (res === null) { + onResponse(trailers, flags, rawTrailers); + return; + } + // Assigns trailers to the response object. res.trailers = trailers; res.rawTrailers = rawTrailers; - })); + }); - const {socket} = stream.session; - this.socket = socket; - this.connection = socket; + stream.once('close', () => { + const {aborted, res} = this; + if (res) { + if (aborted) { + res.aborted = true; + res.emit('aborted'); + res.destroy(); + } + + const finish = () => { + res.emit('close'); + + this.destroy(); + this.emit('close'); + }; + + if (res.readable) { + res.once('end', finish); + } else { + finish(); + } + + return; + } + + if (!this.destroyed) { + this.destroy(new Error('The HTTP/2 stream has been early terminated')); + this.emit('close'); + return; + } + + this.destroy(); + this.emit('close'); + }); + + this.socket = new Proxy(stream, proxySocketHandler); for (const job of this[kJobs]) { job(); @@ -20192,24 +18103,55 @@ class ClientRequest extends Writable { this.emit('socket', this.socket); }; + if (!(HTTP2_HEADER_AUTHORITY in this[kHeaders]) && !isConnectMethod) { + this[kHeaders][HTTP2_HEADER_AUTHORITY] = this[kOrigin].host; + } + // Makes a HTTP2 request if (this[kSession]) { try { onStream(this[kSession].request(this[kHeaders])); } catch (error) { - this.emit('error', error); + this.destroy(error); } } else { this.reusedSocket = true; try { - onStream(await this.agent.request(this[kOrigin], this[kOptions], this[kHeaders])); + const promise = this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]); + this[kPendingAgentPromise] = promise; + + onStream(await promise); + + this[kPendingAgentPromise] = false; } catch (error) { - this.emit('error', error); + this[kPendingAgentPromise] = false; + + this.destroy(error); } } } + get connection() { + return this.socket; + } + + set connection(value) { + this.socket = value; + } + + getHeaderNames() { + return Object.keys(this[kHeaders]); + } + + hasHeader(name) { + if (typeof name !== 'string') { + throw new ERR_INVALID_ARG_TYPE('name', 'string', name); + } + + return Boolean(this[kHeaders][name.toLowerCase()]); + } + getHeader(name) { if (typeof name !== 'string') { throw new ERR_INVALID_ARG_TYPE('name', 'string', name); @@ -20239,19 +18181,24 @@ class ClientRequest extends Writable { throw new ERR_HTTP_HEADERS_SENT('set'); } - if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) { - throw new ERR_INVALID_HTTP_TOKEN('Header name', name); + validateHeaderName(name); + validateHeaderValue(name, value); + + const lowercased = name.toLowerCase(); + + if (lowercased === 'connection') { + if (value.toLowerCase() === 'keep-alive') { + return; + } + + throw new Error(`Invalid 'connection' header: ${value}`); } - if (typeof value === 'undefined') { - throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name); + if (lowercased === 'host' && this.method === 'CONNECT') { + this[kHeaders][HTTP2_HEADER_AUTHORITY] = value; + } else { + this[kHeaders][lowercased] = value; } - - if (isInvalidHeaderValue.test(value)) 
{ - throw new ERR_INVALID_CHAR('header content', name); - } - - this[kHeaders][name.toLowerCase()] = value; } setNoDelay() { @@ -20297,13 +18244,14 @@ module.exports = ClientRequest; "use strict"; -const {Readable} = __nccwpck_require__(2413); +const {Readable} = __nccwpck_require__(2781); class IncomingMessage extends Readable { constructor(socket, highWaterMark) { super({ - highWaterMark, - autoDestroy: false + emitClose: false, + autoDestroy: true, + highWaterMark }); this.statusCode = null; @@ -20323,12 +18271,26 @@ class IncomingMessage extends Readable { this.rawTrailers = []; this.socket = socket; - this.connection = socket; this._dumped = false; } - _destroy(error) { + get connection() { + return this.socket; + } + + set connection(value) { + this.socket = value; + } + + _destroy(error, callback) { + if (!this.readableEnded) { + this.aborted = true; + } + + // See https://github.com/nodejs/node/issues/35303 + callback(); + this.req._request.destroy(error); } @@ -20363,15 +18325,27 @@ module.exports = IncomingMessage; "use strict"; -const http2 = __nccwpck_require__(7565); -const agent = __nccwpck_require__(9898); +const http2 = __nccwpck_require__(5158); +const { + Agent, + globalAgent +} = __nccwpck_require__(9898); const ClientRequest = __nccwpck_require__(9632); const IncomingMessage = __nccwpck_require__(2575); const auto = __nccwpck_require__(7167); +const { + HttpOverHttp2, + HttpsOverHttp2 +} = __nccwpck_require__(8795); +const Http2OverHttp2 = __nccwpck_require__(8553); +const { + Http2OverHttp, + Http2OverHttps +} = __nccwpck_require__(9794); +const validateHeaderName = __nccwpck_require__(4592); +const validateHeaderValue = __nccwpck_require__(3549); -const request = (url, options, callback) => { - return new ClientRequest(url, options, callback); -}; +const request = (url, options, callback) => new ClientRequest(url, options, callback); const get = (url, options, callback) => { // eslint-disable-next-line unicorn/prevent-abbreviations @@ -20385,13 +18359,337 @@ module.exports = { ...http2, ClientRequest, IncomingMessage, - ...agent, + Agent, + globalAgent, request, get, - auto + auto, + proxies: { + HttpOverHttp2, + HttpsOverHttp2, + Http2OverHttp2, + Http2OverHttp, + Http2OverHttps + }, + validateHeaderName, + validateHeaderValue }; +/***/ }), + +/***/ 7885: +/***/ ((module) => { + +"use strict"; + + +module.exports = self => { + const {username, password} = self.proxyOptions.url; + + if (username || password) { + const data = `${username}:${password}`; + const authorization = `Basic ${Buffer.from(data).toString('base64')}`; + + return { + 'proxy-authorization': authorization, + authorization + }; + } + + return {}; +}; + + +/***/ }), + +/***/ 8795: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const tls = __nccwpck_require__(4404); +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const JSStreamSocket = __nccwpck_require__(1564); +const {globalAgent} = __nccwpck_require__(9898); +const UnexpectedStatusCodeError = __nccwpck_require__(6203); +const initialize = __nccwpck_require__(1089); +const getAuthorizationHeaders = __nccwpck_require__(7885); + +const createConnection = (self, options, callback) => { + (async () => { + try { + const {proxyOptions} = self; + const {url, headers, raw} = proxyOptions; + + const stream = await globalAgent.request(url, proxyOptions, { + ...getAuthorizationHeaders(self), + ...headers, + ':method': 'CONNECT', + ':authority': `${options.host}:${options.port}` + }); 
+ + stream.once('error', callback); + stream.once('response', headers => { + const statusCode = headers[':status']; + + if (statusCode !== 200) { + callback(new UnexpectedStatusCodeError(statusCode)); + return; + } + + const encrypted = self instanceof https.Agent; + + if (raw && encrypted) { + options.socket = stream; + const secureStream = tls.connect(options); + + secureStream.once('close', () => { + stream.destroy(); + }); + + callback(null, secureStream); + return; + } + + const socket = new JSStreamSocket(stream); + socket.encrypted = false; + socket._handle.getpeername = out => { + out.family = undefined; + out.address = undefined; + out.port = undefined; + }; + + callback(null, socket); + }); + } catch (error) { + callback(error); + } + })(); +}; + +class HttpOverHttp2 extends http.Agent { + constructor(options) { + super(options); + + initialize(this, options.proxyOptions); + } + + createConnection(options, callback) { + createConnection(this, options, callback); + } +} + +class HttpsOverHttp2 extends https.Agent { + constructor(options) { + super(options); + + initialize(this, options.proxyOptions); + } + + createConnection(options, callback) { + createConnection(this, options, callback); + } +} + +module.exports = { + HttpOverHttp2, + HttpsOverHttp2 +}; + + +/***/ }), + +/***/ 9794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const Http2OverHttpX = __nccwpck_require__(1857); +const getAuthorizationHeaders = __nccwpck_require__(7885); + +const getStream = request => new Promise((resolve, reject) => { + const onConnect = (response, socket, head) => { + socket.unshift(head); + + request.off('error', reject); + resolve([socket, response.statusCode]); + }; + + request.once('error', reject); + request.once('connect', onConnect); +}); + +class Http2OverHttp extends Http2OverHttpX { + async _getProxyStream(authority) { + const {proxyOptions} = this; + const {url, headers} = this.proxyOptions; + + const network = url.protocol === 'https:' ? https : http; + + // `new URL('https://localhost/httpbin.org:443')` results in + // a `/httpbin.org:443` path, which has an invalid leading slash. 
+ const request = network.request({ + ...proxyOptions, + hostname: url.hostname, + port: url.port, + path: authority, + headers: { + ...getAuthorizationHeaders(this), + ...headers, + host: authority + }, + method: 'CONNECT' + }).end(); + + return getStream(request); + } +} + +module.exports = { + Http2OverHttp, + Http2OverHttps: Http2OverHttp +}; + + +/***/ }), + +/***/ 8553: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {globalAgent} = __nccwpck_require__(9898); +const Http2OverHttpX = __nccwpck_require__(1857); +const getAuthorizationHeaders = __nccwpck_require__(7885); + +const getStatusCode = stream => new Promise((resolve, reject) => { + stream.once('error', reject); + stream.once('response', headers => { + stream.off('error', reject); + resolve(headers[':status']); + }); +}); + +class Http2OverHttp2 extends Http2OverHttpX { + async _getProxyStream(authority) { + const {proxyOptions} = this; + + const headers = { + ...getAuthorizationHeaders(this), + ...proxyOptions.headers, + ':method': 'CONNECT', + ':authority': authority + }; + + const stream = await globalAgent.request(proxyOptions.url, proxyOptions, headers); + const statusCode = await getStatusCode(stream); + + return [stream, statusCode]; + } +} + +module.exports = Http2OverHttp2; + + +/***/ }), + +/***/ 1857: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {Agent} = __nccwpck_require__(9898); +const JSStreamSocket = __nccwpck_require__(1564); +const UnexpectedStatusCodeError = __nccwpck_require__(6203); +const initialize = __nccwpck_require__(1089); + +class Http2OverHttpX extends Agent { + constructor(options) { + super(options); + + initialize(this, options.proxyOptions); + } + + async createConnection(origin, options) { + const authority = `${origin.hostname}:${origin.port || 443}`; + + const [stream, statusCode] = await this._getProxyStream(authority); + if (statusCode !== 200) { + throw new UnexpectedStatusCodeError(statusCode); + } + + if (this.proxyOptions.raw) { + options.socket = stream; + } else { + const socket = new JSStreamSocket(stream); + socket.encrypted = false; + socket._handle.getpeername = out => { + out.family = undefined; + out.address = undefined; + out.port = undefined; + }; + + return socket; + } + + return super.createConnection(origin, options); + } +} + +module.exports = Http2OverHttpX; + + +/***/ }), + +/***/ 1089: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +// See https://github.com/facebook/jest/issues/2549 +// eslint-disable-next-line node/prefer-global/url +const {URL} = __nccwpck_require__(7310); +const checkType = __nccwpck_require__(3453); + +module.exports = (self, proxyOptions) => { + checkType('proxyOptions', proxyOptions, ['object']); + checkType('proxyOptions.headers', proxyOptions.headers, ['object', 'undefined']); + checkType('proxyOptions.raw', proxyOptions.raw, ['boolean', 'undefined']); + checkType('proxyOptions.url', proxyOptions.url, [URL, 'string']); + + const url = new URL(proxyOptions.url); + + self.proxyOptions = { + raw: true, + ...proxyOptions, + headers: {...proxyOptions.headers}, + url + }; +}; + + +/***/ }), + +/***/ 6203: +/***/ ((module) => { + +"use strict"; + + +class UnexpectedStatusCodeError extends Error { + constructor(statusCode) { + super(`The proxy server rejected the request with status code ${statusCode}`); + this.statusCode = statusCode; + } +} + +module.exports = UnexpectedStatusCodeError; + + /***/ }), /***/ 1982: @@ 
-20399,27 +18697,29 @@ module.exports = { "use strict"; -const net = __nccwpck_require__(1631); -/* istanbul ignore file: https://github.com/nodejs/node/blob/v13.0.1/lib/_http_agent.js */ +const {isIP} = __nccwpck_require__(1808); +const assert = __nccwpck_require__(9491); -module.exports = options => { - let servername = options.host; - const hostHeader = options.headers && options.headers.host; +const getHost = host => { + if (host[0] === '[') { + const idx = host.indexOf(']'); - if (hostHeader) { - if (hostHeader.startsWith('[')) { - const index = hostHeader.indexOf(']'); - if (index === -1) { - servername = hostHeader; - } else { - servername = hostHeader.slice(1, -1); - } - } else { - servername = hostHeader.split(':', 1)[0]; - } + assert(idx !== -1); + return host.slice(1, idx); } - if (net.isIP(servername)) { + const idx = host.indexOf(':'); + if (idx === -1) { + return host; + } + + return host.slice(0, idx); +}; + +module.exports = host => { + const servername = getHost(host); + + if (isIP(servername)) { return ''; } @@ -20427,6 +18727,75 @@ module.exports = options => { }; +/***/ }), + +/***/ 3453: +/***/ ((module) => { + +"use strict"; + + +const checkType = (name, value, types) => { + const valid = types.some(type => { + const typeofType = typeof type; + if (typeofType === 'string') { + return typeof value === type; + } + + return value instanceof type; + }); + + if (!valid) { + const names = types.map(type => typeof type === 'string' ? type : type.name); + + throw new TypeError(`Expected '${name}' to be a type of ${names.join(' or ')}, got ${typeof value}`); + } +}; + +module.exports = checkType; + + +/***/ }), + +/***/ 9237: +/***/ ((module) => { + +"use strict"; + + +module.exports = stream => { + if (stream.listenerCount('error') !== 0) { + return stream; + } + + stream.__destroy = stream._destroy; + stream._destroy = (...args) => { + const callback = args.pop(); + + stream.__destroy(...args, async error => { + await Promise.resolve(); + callback(error); + }); + }; + + const onError = error => { + // eslint-disable-next-line promise/prefer-await-to-then + Promise.resolve().then(() => { + stream.emit('error', error); + }); + }; + + stream.once('error', onError); + + // eslint-disable-next-line promise/prefer-await-to-then + Promise.resolve().then(() => { + stream.off('error', onError); + }); + + return stream; +}; + + /***/ }), /***/ 7087: @@ -20459,25 +18828,31 @@ makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => { return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`; }); -makeError(TypeError, 'ERR_INVALID_PROTOCOL', args => { - return `Protocol "${args[0]}" not supported. Expected "${args[1]}"`; -}); +makeError(TypeError, 'ERR_INVALID_PROTOCOL', args => + `Protocol "${args[0]}" not supported. 
Expected "${args[1]}"` +); -makeError(Error, 'ERR_HTTP_HEADERS_SENT', args => { - return `Cannot ${args[0]} headers after they are sent to the client`; -}); +makeError(Error, 'ERR_HTTP_HEADERS_SENT', args => + `Cannot ${args[0]} headers after they are sent to the client` +); -makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => { - return `${args[0]} must be a valid HTTP token [${args[1]}]`; -}); +makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => + `${args[0]} must be a valid HTTP token [${args[1]}]` +); -makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => { - return `Invalid value "${args[0]} for header "${args[1]}"`; -}); +makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => + `Invalid value "${args[0]} for header "${args[1]}"` +); -makeError(TypeError, 'ERR_INVALID_CHAR', args => { - return `Invalid character in ${args[0]} [${args[1]}]`; -}); +makeError(TypeError, 'ERR_INVALID_CHAR', args => + `Invalid character in ${args[0]} [${args[1]}]` +); + +makeError( + Error, + 'ERR_HTTP2_NO_SOCKET_MANIPULATION', + 'HTTP/2 sockets should not be directly manipulated (e.g. read and written)' +); /***/ }), @@ -20501,6 +18876,22 @@ module.exports = header => { }; +/***/ }), + +/***/ 1564: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const stream = __nccwpck_require__(2781); +const tls = __nccwpck_require__(4404); + +// Really awesome hack. +const JSStreamSocket = (new tls.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor; + +module.exports = JSStreamSocket; + + /***/ }), /***/ 1818: @@ -20518,34 +18909,155 @@ module.exports = (from, to, events) => { /***/ }), -/***/ 2686: -/***/ ((module) => { +/***/ 9404: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/* istanbul ignore file: https://github.com/nodejs/node/blob/a91293d4d9ab403046ab5eb022332e4e3d249bd3/lib/internal/url.js#L1257 */ +const {ERR_HTTP2_NO_SOCKET_MANIPULATION} = __nccwpck_require__(7087); -module.exports = url => { - const options = { - protocol: url.protocol, - hostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname, - host: url.host, - hash: url.hash, - search: url.search, - pathname: url.pathname, - href: url.href, - path: `${url.pathname || ''}${url.search || ''}` - }; +/* istanbul ignore file */ +/* https://github.com/nodejs/node/blob/6eec858f34a40ffa489c1ec54bb24da72a28c781/lib/internal/http2/compat.js#L195-L272 */ - if (typeof url.port === 'string' && url.port.length !== 0) { - options.port = Number(url.port); +const proxySocketHandler = { + has(stream, property) { + // Replaced [kSocket] with .socket + const reference = stream.session === undefined ? stream : stream.session.socket; + return (property in stream) || (property in reference); + }, + + get(stream, property) { + switch (property) { + case 'on': + case 'once': + case 'end': + case 'emit': + case 'destroy': + return stream[property].bind(stream); + case 'writable': + case 'destroyed': + return stream[property]; + case 'readable': + if (stream.destroyed) { + return false; + } + + return stream.readable; + case 'setTimeout': { + const {session} = stream; + if (session !== undefined) { + return session.setTimeout.bind(session); + } + + return stream.setTimeout.bind(stream); + } + + case 'write': + case 'read': + case 'pause': + case 'resume': + throw new ERR_HTTP2_NO_SOCKET_MANIPULATION(); + default: { + // Replaced [kSocket] with .socket + const reference = stream.session === undefined ? 
stream : stream.session.socket; + const value = reference[property]; + + return typeof value === 'function' ? value.bind(reference) : value; + } + } + }, + + getPrototypeOf(stream) { + if (stream.session !== undefined) { + // Replaced [kSocket] with .socket + return Reflect.getPrototypeOf(stream.session.socket); + } + + return Reflect.getPrototypeOf(stream); + }, + + set(stream, property, value) { + switch (property) { + case 'writable': + case 'readable': + case 'destroyed': + case 'on': + case 'once': + case 'end': + case 'emit': + case 'destroy': + stream[property] = value; + return true; + case 'setTimeout': { + const {session} = stream; + if (session === undefined) { + stream.setTimeout = value; + } else { + session.setTimeout = value; + } + + return true; + } + + case 'write': + case 'read': + case 'pause': + case 'resume': + throw new ERR_HTTP2_NO_SOCKET_MANIPULATION(); + default: { + // Replaced [kSocket] with .socket + const reference = stream.session === undefined ? stream : stream.session.socket; + reference[property] = value; + return true; + } + } + } +}; + +module.exports = proxySocketHandler; + + +/***/ }), + +/***/ 4592: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const {ERR_INVALID_HTTP_TOKEN} = __nccwpck_require__(7087); +const isRequestPseudoHeader = __nccwpck_require__(1199); + +const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/; + +module.exports = name => { + if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) { + throw new ERR_INVALID_HTTP_TOKEN('Header name', name); + } +}; + + +/***/ }), + +/***/ 3549: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const { + ERR_HTTP_INVALID_HEADER_VALUE, + ERR_INVALID_CHAR +} = __nccwpck_require__(7087); + +const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/; + +module.exports = (name, value) => { + if (typeof value === 'undefined') { + throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name); } - if (url.username || url.password) { - options.auth = `${url.username || ''}:${url.password || ''}`; + if (isInvalidHeaderValue.test(value)) { + throw new ERR_INVALID_CHAR('header content', name); } - - return options; }; @@ -20590,8 +19102,128 @@ module.exports = function isExtglob(str) { var isExtglob = __nccwpck_require__(6435); var chars = { '{': '}', '(': ')', '[': ']'}; -var strictRegex = /\\(.)|(^!|\*|[\].+)]\?|\[[^\\\]]+\]|\{[^\\}]+\}|\(\?[:!=][^\\)]+\)|\([^|]+\|[^\\)]+\))/; -var relaxedRegex = /\\(.)|(^!|[*?{}()[\]]|\(\?)/; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; module.exports = function isGlob(str, options) { if (typeof str !== 'string' || str === '') { @@ -20602,35 +19234,89 @@ module.exports = function isGlob(str, options) { return true; } - var regex = strictRegex; - var match; + var check = strictCheck; - // optionally relax regex + // optionally relax check if (options && options.strict === false) { - regex = relaxedRegex; + check = relaxedCheck; } - while ((match = regex.exec(str))) { - if (match[2]) return true; - var idx = match.index + match[0].length; + return check(str); +}; - // if an open bracket/brace/paren is escaped, - // set the index to the next closing character - var open = match[1]; - var close = open ? chars[open] : null; - if (open && close) { - var n = str.indexOf(close, idx); - if (n !== -1) { - idx = n + 1; - } - } - str = str.slice(idx); +/***/ }), + +/***/ 5680: +/***/ ((module) => { + +"use strict"; +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + + + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? 
Number.isFinite(+num) : isFinite(+num); } return false; }; +/***/ }), + +/***/ 3287: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +exports.isPlainObject = isPlainObject; + + /***/ }), /***/ 2820: @@ -20704,75 +19390,195 @@ exports.parse = function (s) { "use strict"; -const EventEmitter = __nccwpck_require__(8614); +const EventEmitter = __nccwpck_require__(2361); const JSONB = __nccwpck_require__(2820); +const compressBrotli = __nccwpck_require__(5728); -const loadStore = opts => { +const loadStore = options => { const adapters = { redis: '@keyv/redis', + rediss: '@keyv/redis', mongodb: '@keyv/mongo', mongo: '@keyv/mongo', sqlite: '@keyv/sqlite', postgresql: '@keyv/postgres', postgres: '@keyv/postgres', - mysql: '@keyv/mysql' + mysql: '@keyv/mysql', + etcd: '@keyv/etcd', + offline: '@keyv/offline', + tiered: '@keyv/tiered', }; - if (opts.adapter || opts.uri) { - const adapter = opts.adapter || /^[^:]*/.exec(opts.uri)[0]; - return new (require(adapters[adapter]))(opts); + if (options.adapter || options.uri) { + const adapter = options.adapter || /^[^:+]*/.exec(options.uri)[0]; + return new (require(adapters[adapter]))(options); } return new Map(); }; +const iterableAdapters = [ + 'sqlite', + 'postgres', + 'mysql', + 'mongo', + 'redis', + 'tiered', +]; + class Keyv extends EventEmitter { - constructor(uri, opts) { + constructor(uri, {emitErrors = true, ...options} = {}) { super(); - this.opts = Object.assign( - { - namespace: 'keyv', - serialize: JSONB.stringify, - deserialize: JSONB.parse - }, - (typeof uri === 'string') ? { uri } : uri, - opts - ); + this.opts = { + namespace: 'keyv', + serialize: JSONB.stringify, + deserialize: JSONB.parse, + ...((typeof uri === 'string') ? {uri} : uri), + ...options, + }; if (!this.opts.store) { - const adapterOpts = Object.assign({}, this.opts); - this.opts.store = loadStore(adapterOpts); + const adapterOptions = {...this.opts}; + this.opts.store = loadStore(adapterOptions); } - if (typeof this.opts.store.on === 'function') { - this.opts.store.on('error', err => this.emit('error', err)); + if (this.opts.compress) { + const brotli = compressBrotli(this.opts.compress.opts); + this.opts.serialize = async ({value, expires}) => brotli.serialize({value: await brotli.compress(value), expires}); + this.opts.deserialize = async data => { + const {value, expires} = brotli.deserialize(data); + return {value: await brotli.decompress(value), expires}; + }; + } + + if (typeof this.opts.store.on === 'function' && emitErrors) { + this.opts.store.on('error', error => this.emit('error', error)); } this.opts.store.namespace = this.opts.namespace; + + const generateIterator = iterator => async function * () { + for await (const [key, raw] of typeof iterator === 'function' + ? 
iterator(this.opts.store.namespace) + : iterator) { + const data = this.opts.deserialize(raw); + if (this.opts.store.namespace && !key.includes(this.opts.store.namespace)) { + continue; + } + + if (typeof data.expires === 'number' && Date.now() > data.expires) { + this.delete(key); + continue; + } + + yield [this._getKeyUnprefix(key), data.value]; + } + }; + + // Attach iterators + if (typeof this.opts.store[Symbol.iterator] === 'function' && this.opts.store instanceof Map) { + this.iterator = generateIterator(this.opts.store); + } else if (typeof this.opts.store.iterator === 'function' && this.opts.store.opts + && this._checkIterableAdaptar()) { + this.iterator = generateIterator(this.opts.store.iterator.bind(this.opts.store)); + } + } + + _checkIterableAdaptar() { + return iterableAdapters.includes(this.opts.store.opts.dialect) + || iterableAdapters.findIndex(element => this.opts.store.opts.url.includes(element)) >= 0; } _getKeyPrefix(key) { return `${this.opts.namespace}:${key}`; } - get(key, opts) { - const keyPrefixed = this._getKeyPrefix(key); - const { store } = this.opts; + _getKeyPrefixArray(keys) { + return keys.map(key => `${this.opts.namespace}:${key}`); + } + + _getKeyUnprefix(key) { + return key + .split(':') + .splice(1) + .join(':'); + } + + get(key, options) { + const {store} = this.opts; + const isArray = Array.isArray(key); + const keyPrefixed = isArray ? this._getKeyPrefixArray(key) : this._getKeyPrefix(key); + if (isArray && store.getMany === undefined) { + const promises = []; + for (const key of keyPrefixed) { + promises.push(Promise.resolve() + .then(() => store.get(key)) + .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : data) + .then(data => { + if (data === undefined || data === null) { + return undefined; + } + + if (typeof data.expires === 'number' && Date.now() > data.expires) { + return this.delete(key).then(() => undefined); + } + + return (options && options.raw) ? data : data.value; + }), + ); + } + + return Promise.allSettled(promises) + .then(values => { + const data = []; + for (const value of values) { + data.push(value.value); + } + + return data.every(x => x === undefined) ? [] : data; + }); + } + return Promise.resolve() - .then(() => store.get(keyPrefixed)) + .then(() => isArray ? store.getMany(keyPrefixed) : store.get(keyPrefixed)) + .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : data) .then(data => { - return (typeof data === 'string') ? this.opts.deserialize(data) : data; - }) - .then(data => { - if (data === undefined) { + if (data === undefined || data === null) { return undefined; } + if (isArray) { + const result = []; + + if (data.length === 0) { + return []; + } + + for (let row of data) { + if ((typeof row === 'string')) { + row = this.opts.deserialize(row); + } + + if (row === undefined || row === null) { + result.push(undefined); + continue; + } + + if (typeof row.expires === 'number' && Date.now() > row.expires) { + this.delete(key).then(() => undefined); + result.push(undefined); + } else { + result.push((options && options.raw) ? row : row.value); + } + } + + return result.every(x => x === undefined) ? [] : result; + } + if (typeof data.expires === 'number' && Date.now() > data.expires) { - this.delete(key); - return undefined; + return this.delete(key).then(() => undefined); } - return (opts && opts.raw) ? data : data.value; + return (options && options.raw) ? 
data : data.value; }); } @@ -20786,12 +19592,16 @@ class Keyv extends EventEmitter { ttl = undefined; } - const { store } = this.opts; + const {store} = this.opts; return Promise.resolve() .then(() => { const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null; - value = { value, expires }; + if (typeof value === 'symbol') { + this.emit('error', 'symbol cannot be serialized'); + } + + value = {value, expires}; return this.opts.serialize(value); }) .then(value => store.set(keyPrefixed, value, ttl)) @@ -20799,40 +19609,59 @@ class Keyv extends EventEmitter { } delete(key) { + const {store} = this.opts; + if (Array.isArray(key)) { + const keyPrefixed = this._getKeyPrefixArray(key); + if (store.deleteMany === undefined) { + const promises = []; + for (const key of keyPrefixed) { + promises.push(store.delete(key)); + } + + return Promise.allSettled(promises) + .then(values => values.every(x => x.value === true)); + } + + return Promise.resolve() + .then(() => store.deleteMany(keyPrefixed)); + } + const keyPrefixed = this._getKeyPrefix(key); - const { store } = this.opts; return Promise.resolve() .then(() => store.delete(keyPrefixed)); } clear() { - const { store } = this.opts; + const {store} = this.opts; return Promise.resolve() .then(() => store.clear()); } + + has(key) { + const keyPrefixed = this._getKeyPrefix(key); + const {store} = this.opts; + return Promise.resolve() + .then(async () => { + if (typeof store.has === 'function') { + return store.has(keyPrefixed); + } + + const value = await store.get(keyPrefixed); + return value !== undefined; + }); + } + + disconnect() { + const {store} = this.opts; + if (typeof store.disconnect === 'function') { + return store.disconnect(); + } + } } module.exports = Keyv; -/***/ }), - -/***/ 9662: -/***/ ((module) => { - -"use strict"; - -module.exports = object => { - const result = {}; - - for (const [key, value] of Object.entries(object)) { - result[key.toLowerCase()] = value; - } - - return result; -}; - - /***/ }), /***/ 2578: @@ -20847,7 +19676,7 @@ module.exports = object => { * Copyright (c) 2014-2020 Teambition * Licensed under the MIT license. */ -const Stream = __nccwpck_require__(2413) +const Stream = __nccwpck_require__(2781) const PassThrough = Stream.PassThrough const slice = Array.prototype.slice @@ -20985,6 +19814,481 @@ function pauseStreams (streams, options) { } +/***/ }), + +/***/ 6228: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(3837); +const braces = __nccwpck_require__(610); +const picomatch = __nccwpck_require__(8569); +const utils = __nccwpck_require__(479); +const isEmptyString = val => val === '' || val === './'; + +/** + * Returns an array of strings that match one or more glob patterns. + * + * ```js + * const mm = require('micromatch'); + * // mm(list, patterns[, options]); + * + * console.log(mm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {String|Array} `list` List of strings to match. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
+ * @param {Object} `options` See available [options](#options) + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +const micromatch = (list, patterns, options) => { + patterns = [].concat(patterns); + list = [].concat(list); + + let omit = new Set(); + let keep = new Set(); + let items = new Set(); + let negatives = 0; + + let onResult = state => { + items.add(state.output); + if (options && options.onResult) { + options.onResult(state); + } + }; + + for (let i = 0; i < patterns.length; i++) { + let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); + let negated = isMatch.state.negated || isMatch.state.negatedExtglob; + if (negated) negatives++; + + for (let item of list) { + let matched = isMatch(item, true); + + let match = negated ? !matched.isMatch : matched.isMatch; + if (!match) continue; + + if (negated) { + omit.add(matched.output); + } else { + omit.delete(matched.output); + keep.add(matched.output); + } + } + } + + let result = negatives === patterns.length ? [...items] : [...keep]; + let matches = result.filter(item => !omit.has(item)); + + if (options && matches.length === 0) { + if (options.failglob === true) { + throw new Error(`No matches found for "${patterns.join(', ')}"`); + } + + if (options.nonull === true || options.nullglob === true) { + return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; + } + } + + return matches; +}; + +/** + * Backwards compatibility + */ + +micromatch.match = micromatch; + +/** + * Returns a matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * const mm = require('micromatch'); + * // mm.matcher(pattern[, options]); + * + * const isMatch = mm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` + * @return {Function} Returns a matcher function. + * @api public + */ + +micromatch.matcher = (pattern, options) => picomatch(pattern, options); + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const mm = require('micromatch'); + * // mm.isMatch(string, patterns[, options]); + * + * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(mm.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `[options]` See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Backwards compatibility + */ + +micromatch.any = micromatch.isMatch; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.not(list, patterns[, options]); + * + * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +micromatch.not = (list, patterns, options = {}) => { + patterns = [].concat(patterns).map(String); + let result = new Set(); + let items = []; + + let onResult = state => { + if (options.onResult) options.onResult(state); + items.push(state.output); + }; + + let matches = new Set(micromatch(list, patterns, { ...options, onResult })); + + for (let item of items) { + if (!matches.has(item)) { + result.add(item); + } + } + return [...result]; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var mm = require('micromatch'); + * // mm.contains(string, pattern[, options]); + * + * console.log(mm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(mm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any of the patterns matches any part of `str`. + * @api public + */ + +micromatch.contains = (str, pattern, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + if (Array.isArray(pattern)) { + return pattern.some(p => micromatch.contains(str, p, options)); + } + + if (typeof pattern === 'string') { + if (isEmptyString(str) || isEmptyString(pattern)) { + return false; + } + + if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { + return true; + } + } + + return micromatch.isMatch(str, pattern, { ...options, contains: true }); +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * const mm = require('micromatch'); + * // mm.matchKeys(object, patterns[, options]); + * + * const obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(mm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. + * @api public + */ + +micromatch.matchKeys = (obj, patterns, options) => { + if (!utils.isObject(obj)) { + throw new TypeError('Expected the first argument to be an object'); + } + let keys = micromatch(Object.keys(obj), patterns, options); + let res = {}; + for (let key of keys) res[key] = obj[key]; + return res; +}; + +/** + * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.some(list, patterns[, options]); + * + * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` + * @api public + */ + +micromatch.some = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (items.some(item => isMatch(item))) { + return true; + } + } + return false; +}; + +/** + * Returns true if every string in the given `list` matches + * any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.every(list, patterns[, options]); + * + * console.log(mm.every('foo.js', ['foo.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` + * @api public + */ + +micromatch.every = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (!items.every(item => isMatch(item))) { + return false; + } + } + return true; +}; + +/** + * Returns true if **all** of the given `patterns` match + * the specified string. + * + * ```js + * const mm = require('micromatch'); + * // mm.all(string, patterns[, options]); + * + * console.log(mm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.all = (str, patterns, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + return [].concat(patterns).every(p => picomatch(p, options)(str)); +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. + * + * ```js + * const mm = require('micromatch'); + * // mm.capture(pattern, string[, options]); + * + * console.log(mm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(mm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `glob` Glob pattern to use for matching. + * @param {String} `input` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
+ * @api public + */ + +micromatch.capture = (glob, input, options) => { + let posix = utils.isWindows(options); + let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); + let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); + + if (match) { + return match.slice(1).map(v => v === void 0 ? '' : v); + } +}; + +/** + * Create a regular expression from the given glob `pattern`. + * + * ```js + * const mm = require('micromatch'); + * // mm.makeRe(pattern[, options]); + * + * console.log(mm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +micromatch.makeRe = (...args) => picomatch.makeRe(...args); + +/** + * Scan a glob pattern to separate the pattern into segments. Used + * by the [split](#split) method. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.scan(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +micromatch.scan = (...args) => picomatch.scan(...args); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.parse(pattern[, options]); + * ``` + * @param {String} `glob` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as regex source string. + * @api public + */ + +micromatch.parse = (patterns, options) => { + let res = []; + for (let pattern of [].concat(patterns || [])) { + for (let str of braces(String(pattern), options)) { + res.push(picomatch.parse(str, options)); + } + } + return res; +}; + +/** + * Process the given brace `pattern`. + * + * ```js + * const { braces } = require('micromatch'); + * console.log(braces('foo/{a,b,c}/bar')); + * //=> [ 'foo/(a|b|c)/bar' ] + * + * console.log(braces('foo/{a,b,c}/bar', { expand: true })); + * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] + * ``` + * @param {String} `pattern` String with brace pattern to process. + * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. + * @return {Array} + * @api public + */ + +micromatch.braces = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { + return [pattern]; + } + return braces(pattern, options); +}; + +/** + * Expand braces + */ + +micromatch.braceExpand = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + return micromatch.braces(pattern, { ...options, expand: true }); +}; + +/** + * Expose micromatch + */ + +module.exports = micromatch; + + /***/ }), /***/ 2610: @@ -21037,11 +20341,12 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } -var Stream = _interopDefault(__nccwpck_require__(2413)); -var http = _interopDefault(__nccwpck_require__(8605)); -var Url = _interopDefault(__nccwpck_require__(8835)); -var https = _interopDefault(__nccwpck_require__(7211)); -var zlib = _interopDefault(__nccwpck_require__(8761)); +var Stream = _interopDefault(__nccwpck_require__(2781)); +var http = _interopDefault(__nccwpck_require__(3685)); +var Url = _interopDefault(__nccwpck_require__(7310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(8665)); +var https = _interopDefault(__nccwpck_require__(5687)); +var zlib = _interopDefault(__nccwpck_require__(9796)); // Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js @@ -21192,7 +20497,7 @@ FetchError.prototype.name = 'FetchError'; let convert; try { - convert = __nccwpck_require__(2877).convert; + convert = (__nccwpck_require__(2877).convert); } catch (e) {} const INTERNALS = Symbol('Body internals'); @@ -22174,11 +21479,32 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, { }); const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; // fix an issue where "format", "parse" aren't a named export for node <10 const parse_url = Url.parse; const format_url = Url.format; +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; /** @@ -22215,14 +21541,14 @@ class Request { // in order to support Node.js' Url objects; though WHATWG's URL objects // will fall into this branch also (since their `toString()` will return // `href` property anyway) - parsedURL = parse_url(input.href); + parsedURL = parseURL(input.href); } else { // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); + parsedURL = parseURL(`${input}`); } input = {}; } else { - parsedURL = parse_url(input.url); + parsedURL = parseURL(input.url); } let method = init.method || input.method || 'GET'; @@ -22416,9 +21742,17 @@ AbortError.prototype = Object.create(Error.prototype); AbortError.prototype.constructor = AbortError; AbortError.prototype.name = 'AbortError'; +const URL$1 = Url.URL || whatwgUrl.URL; + // fix an issue where "PassThrough", "resolve" aren't a named export for node <10 const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; /** * Fetch function @@ -22506,7 +21840,19 @@ function fetch(url, opts) { const location = headers.get('Location'); // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } // HTTP fetch step 5.5 switch (request.redirect) { @@ -22554,6 +21900,12 @@ function fetch(url, opts) { size: request.size }; + if (!isDomainOrSubdomain(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + // HTTP-redirect fetch step 9 if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); @@ -22675,7 +22027,7 @@ fetch.Promise = global.Promise; module.exports = exports = fetch; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.default = exports; +exports["default"] = exports; exports.Headers = Headers; exports.Request = Request; exports.Response = Response; @@ -22685,12 +22037,10 @@ exports.FetchError = FetchError; /***/ }), /***/ 7952: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ ((module) => { "use strict"; -// TODO: Use the `URL` global when targeting Node.js 10 -const URLParser = typeof URL === 'undefined' ? __nccwpck_require__(8835).URL : URL; // https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain'; @@ -22701,21 +22051,20 @@ const testParameter = (name, filters) => { }; const normalizeDataURL = (urlString, {stripHash}) => { - const parts = urlString.match(/^data:(.*?),(.*?)(?:#(.*))?$/); + const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString); - if (!parts) { + if (!match) { throw new Error(`Invalid URL: ${urlString}`); } - const mediaType = parts[1].split(';'); - const body = parts[2]; - const hash = stripHash ? '' : parts[3]; - - let base64 = false; + let {type, data, hash} = match.groups; + const mediaType = type.split(';'); + hash = stripHash ? '' : hash; + let isBase64 = false; if (mediaType[mediaType.length - 1] === 'base64') { mediaType.pop(); - base64 = true; + isBase64 = true; } // Lowercase MIME type @@ -22741,7 +22090,7 @@ const normalizeDataURL = (urlString, {stripHash}) => { ...attributes ]; - if (base64) { + if (isBase64) { normalizedMediaType.push('base64'); } @@ -22749,7 +22098,7 @@ const normalizeDataURL = (urlString, {stripHash}) => { normalizedMediaType.unshift(mimeType); } - return `data:${normalizedMediaType.join(';')},${base64 ? body.trim() : body}${hash ? `#${hash}` : ''}`; + return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ?
`#${hash}` : ''}`; }; const normalizeUrl = (urlString, options) => { @@ -22760,27 +22109,16 @@ const normalizeUrl = (urlString, options) => { forceHttps: false, stripAuthentication: true, stripHash: false, + stripTextFragment: true, stripWWW: true, removeQueryParameters: [/^utm_\w+/i], removeTrailingSlash: true, + removeSingleSlash: true, removeDirectoryIndex: false, sortQueryParameters: true, ...options }; - // TODO: Remove this at some point in the future - if (Reflect.has(options, 'normalizeHttps')) { - throw new Error('options.normalizeHttps is renamed to options.forceHttp'); - } - - if (Reflect.has(options, 'normalizeHttp')) { - throw new Error('options.normalizeHttp is renamed to options.forceHttps'); - } - - if (Reflect.has(options, 'stripFragment')) { - throw new Error('options.stripFragment is renamed to options.stripHash'); - } - urlString = urlString.trim(); // Data URL @@ -22788,6 +22126,10 @@ const normalizeUrl = (urlString, options) => { return normalizeDataURL(urlString, options); } + if (/^view-source:/i.test(urlString)) { + throw new Error('`view-source:` is not supported as it is a non-standard protocol'); + } + const hasRelativeProtocol = urlString.startsWith('//'); const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString); @@ -22796,7 +22138,7 @@ const normalizeUrl = (urlString, options) => { urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol); } - const urlObj = new URLParser(urlString); + const urlObj = new URL(urlString); if (options.forceHttp && options.forceHttps) { throw new Error('The `forceHttp` and `forceHttps` options cannot be used together'); @@ -22819,24 +22161,20 @@ const normalizeUrl = (urlString, options) => { // Remove hash if (options.stripHash) { urlObj.hash = ''; + } else if (options.stripTextFragment) { + urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, ''); } // Remove duplicate slashes if not preceded by a protocol if (urlObj.pathname) { - // TODO: Use the following instead when targeting Node.js 10 - // `urlObj.pathname = urlObj.pathname.replace(/(? { - if (/^(?!\/)/g.test(p1)) { - return `${p1}/`; - } - - return '/'; - }); + urlObj.pathname = urlObj.pathname.replace(/(? { urlObj.hostname = urlObj.hostname.replace(/\.$/, ''); // Remove `www.` - if (options.stripWWW && /^www\.([a-z\-\d]{2,63})\.([a-z.]{2,5})$/.test(urlObj.hostname)) { - // Each label should be max 63 at length (min: 2). - // The extension should be max 5 at length (min: 2). + if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) { + // Each label should be max 63 at length (min: 1). // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names + // Each TLD should be up to 63 characters long (min: 2). + // It is technically possible to have a single character TLD, but none currently exist. 
urlObj.hostname = urlObj.hostname.replace(/^www\./, ''); } } @@ -22876,6 +22215,10 @@ const normalizeUrl = (urlString, options) => { } } + if (options.removeQueryParameters === true) { + urlObj.search = ''; + } + // Sort query parameters if (options.sortQueryParameters) { urlObj.searchParams.sort(); @@ -22885,11 +22228,17 @@ const normalizeUrl = (urlString, options) => { urlObj.pathname = urlObj.pathname.replace(/\/$/, ''); } + const oldUrlString = urlString; + // Take advantage of many of the Node `url` normalizations urlString = urlObj.toString(); - // Remove ending `/` - if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '') { + if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') { + urlString = urlString.replace(/\/$/, ''); + } + + // Remove ending `/` unless removeSingleSlash is false + if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) { urlString = urlString.replace(/\/$/, ''); } @@ -22907,8 +22256,6 @@ const normalizeUrl = (urlString, options) => { }; module.exports = normalizeUrl; -// TODO: Remove this for the next major release -module.exports.default = normalizeUrl; /***/ }), @@ -22960,121 +22307,6 @@ function onceStrict (fn) { } -/***/ }), - -/***/ 9072: -/***/ ((module) => { - -"use strict"; - - -class CancelError extends Error { - constructor(reason) { - super(reason || 'Promise was canceled'); - this.name = 'CancelError'; - } - - get isCanceled() { - return true; - } -} - -class PCancelable { - static fn(userFn) { - return (...arguments_) => { - return new PCancelable((resolve, reject, onCancel) => { - arguments_.push(onCancel); - // eslint-disable-next-line promise/prefer-await-to-then - userFn(...arguments_).then(resolve, reject); - }); - }; - } - - constructor(executor) { - this._cancelHandlers = []; - this._isPending = true; - this._isCanceled = false; - this._rejectOnCancel = true; - - this._promise = new Promise((resolve, reject) => { - this._reject = reject; - - const onResolve = value => { - this._isPending = false; - resolve(value); - }; - - const onReject = error => { - this._isPending = false; - reject(error); - }; - - const onCancel = handler => { - if (!this._isPending) { - throw new Error('The `onCancel` handler was attached after the promise settled.'); - } - - this._cancelHandlers.push(handler); - }; - - Object.defineProperties(onCancel, { - shouldReject: { - get: () => this._rejectOnCancel, - set: boolean => { - this._rejectOnCancel = boolean; - } - } - }); - - return executor(onResolve, onReject, onCancel); - }); - } - - then(onFulfilled, onRejected) { - // eslint-disable-next-line promise/prefer-await-to-then - return this._promise.then(onFulfilled, onRejected); - } - - catch(onRejected) { - return this._promise.catch(onRejected); - } - - finally(onFinally) { - return this._promise.finally(onFinally); - } - - cancel(reason) { - if (!this._isPending || this._isCanceled) { - return; - } - - if (this._cancelHandlers.length > 0) { - try { - for (const handler of this._cancelHandlers) { - handler(); - } - } catch (error) { - this._reject(error); - } - } - - this._isCanceled = true; - if (this._rejectOnCancel) { - this._reject(new CancelError(reason)); - } - } - - get isCanceled() { - return this._isCanceled; - } -} - -Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); - -module.exports = PCancelable; -module.exports.CancelError = CancelError; - - /***/ }), /***/ 8569: @@ -23094,7 +22326,7 @@ 
module.exports = __nccwpck_require__(3322); "use strict"; -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const WIN_SLASH = '\\\\/'; const WIN_NO_SLASH = `[^${WIN_SLASH}]`; @@ -23373,7 +22605,7 @@ const parse = (input, options) => { START_ANCHOR } = PLATFORM_CHARS; - const globstar = (opts) => { + const globstar = opts => { return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; @@ -23423,12 +22655,13 @@ const parse = (input, options) => { const eos = () => state.index === len - 1; const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index]; + const advance = state.advance = () => input[++state.index] || ''; const remaining = () => input.slice(state.index + 1); const consume = (value = '', num = 0) => { state.consumed += value; state.index += num; }; + const append = token => { state.output += token.output != null ? token.output : token.value; consume(token.value); @@ -23484,7 +22717,7 @@ const parse = (input, options) => { } } - if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) { + if (extglobs.length && tok.type !== 'paren') { extglobs[extglobs.length - 1].inner += tok.value; } @@ -23516,6 +22749,7 @@ const parse = (input, options) => { const extglobClose = token => { let output = token.close + (opts.capture ? ')' : ''); + let rest; if (token.type === 'negate') { let extglobStar = star; @@ -23528,7 +22762,18 @@ const parse = (input, options) => { output = token.close = `)$))${extglobStar}`; } - if (token.prev.type === 'bos' && eos()) { + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { state.negatedExtglob = true; } } @@ -23637,9 +22882,9 @@ const parse = (input, options) => { } if (opts.unescape === true) { - value = advance() || ''; + value = advance(); } else { - value += advance() || ''; + value += advance(); } if (state.brackets === 0) { @@ -24303,7 +23548,7 @@ parse.fastpaths = (input, options) => { star = `(${star})`; } - const globstar = (opts) => { + const globstar = opts => { if (opts.noglobstar === true) return star; return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; @@ -24367,7 +23612,7 @@ module.exports = parse; "use strict"; -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const scan = __nccwpck_require__(2429); const parse = __nccwpck_require__(2139); const utils = __nccwpck_require__(479); @@ -24597,6 +23842,40 @@ picomatch.parse = (pattern, options) => { picomatch.scan = (input, options) => scan(input, options); +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. 
+ * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + /** * Create a regular expression from a parsed glob pattern. * @@ -24610,56 +23889,25 @@ picomatch.scan = (input, options) => scan(input, options); * ``` * @param {String} `state` The object returned from the `.parse` method. * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. * @return {RegExp} Returns a regex created from the given pattern. * @api public */ -picomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => { - if (returnOutput === true) { - return parsed.output; - } - - const opts = options || {}; - const prepend = opts.contains ? '' : '^'; - const append = opts.contains ? '' : '$'; - - let source = `${prepend}(?:${parsed.output})${append}`; - if (parsed && parsed.negated === true) { - source = `^(?!${source}).*$`; - } - - const regex = picomatch.toRegex(source, options); - if (returnState === true) { - regex.state = parsed; - } - - return regex; -}; - -picomatch.makeRe = (input, options, returnOutput = false, returnState = false) => { +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { if (!input || typeof input !== 'string') { throw new TypeError('Expected a non-empty string'); } - const opts = options || {}; let parsed = { negated: false, fastpaths: true }; - let prefix = ''; - let output; - if (input.startsWith('./')) { - input = input.slice(2); - prefix = parsed.prefix = './'; + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); } - if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - output = parse.fastpaths(input, options); - } - - if (output === undefined) { + if (!parsed.output) { parsed = parse(input, options); - parsed.prefix = prefix + (parsed.prefix || ''); - } else { - parsed.output = output; } return picomatch.compileRe(parsed, options, returnOutput, returnState); @@ -24746,7 +23994,8 @@ const depth = token => { /** * Quickly scans a glob pattern and returns an object with a handful of * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), and `negated` (true if the path starts with `!`). + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
* * ```js * const pm = require('picomatch'); @@ -24780,6 +24029,7 @@ const scan = (input, options) => { let braceEscaped = false; let backslashes = false; let negated = false; + let negatedExtglob = false; let finished = false; let braces = 0; let prev; @@ -24891,6 +24141,9 @@ const scan = (input, options) => { isGlob = token.isGlob = true; isExtglob = token.isExtglob = true; finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } if (scanToEnd === true) { while (eos() !== true && (code = advance())) { @@ -24945,13 +24198,15 @@ const scan = (input, options) => { isBracket = token.isBracket = true; isGlob = token.isGlob = true; finished = true; - - if (scanToEnd === true) { - continue; - } break; } } + + if (scanToEnd === true) { + continue; + } + + break; } if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { @@ -25042,7 +24297,8 @@ const scan = (input, options) => { isGlob, isExtglob, isGlobstar, - negated + negated, + negatedExtglob }; if (opts.tokens === true) { @@ -25105,7 +24361,7 @@ module.exports = scan; "use strict"; -const path = __nccwpck_require__(5622); +const path = __nccwpck_require__(1017); const win32 = process.platform === 'win32'; const { REGEX_BACKSLASH, @@ -25176,7 +24432,7 @@ exports.wrapOutput = (input, state = {}, options = {}) => { var once = __nccwpck_require__(1223) var eos = __nccwpck_require__(1205) -var fs = __nccwpck_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes +var fs = __nccwpck_require__(7147) // we only need fs to get the ReadStream and WriteStream prototypes var noop = function () {} var ancient = /^v?\.0/.test(process.version) @@ -25258,6 +24514,22 @@ var pump = function () { module.exports = pump +/***/ }), + +/***/ 9795: +/***/ ((module) => { + +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) + + /***/ }), /***/ 9273: @@ -25396,23 +24668,68 @@ module.exports = QuickLRU; "use strict"; -const tls = __nccwpck_require__(4016); +const tls = __nccwpck_require__(4404); + +module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { + let timeout = false; + + let socket; + + const callback = async () => { + await socketPromise; + + socket.off('timeout', onTimeout); + socket.off('error', reject); -module.exports = (options = {}) => new Promise((resolve, reject) => { - const socket = tls.connect(options, () => { if (options.resolveSocket) { - socket.off('error', reject); - resolve({alpnProtocol: socket.alpnProtocol, socket}); + resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); + + if (timeout) { + await Promise.resolve(); + socket.emit('timeout'); + } } else { socket.destroy(); - resolve({alpnProtocol: socket.alpnProtocol}); + resolve({alpnProtocol: socket.alpnProtocol, timeout}); } - }); + }; - socket.on('error', reject); + const onTimeout = async () => { + timeout = true; + callback(); + }; + + const socketPromise = (async () => { + try { + socket = await connect(options, callback); + + socket.on('error', reject); + socket.once('timeout', onTimeout); + } catch (error) { + reject(error); + } + })(); }); +/***/ }), + +/***/ 2075: +/***/ ((module) => { + +"use strict"; + +module.exports = object => { + const result = {}; + + for (const [key, value] of Object.entries(object)) { + result[key.toLowerCase()] = value; + } + + return result; +}; + + /***/ }), /***/ 9004: @@ -25421,8 +24738,8 @@ module.exports = (options = {}) => new Promise((resolve, reject) => { "use strict"; -const Readable = __nccwpck_require__(2413).Readable; -const lowercaseKeys = __nccwpck_require__(9662); +const Readable = (__nccwpck_require__(2781).Readable); +const lowercaseKeys = __nccwpck_require__(2075); class Response extends Readable { constructor(statusCode, headers, body, url) { @@ -25499,14 +24816,16 @@ module.exports = reusify /***/ }), /***/ 5288: -/***/ ((module) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! run-parallel. MIT License. Feross Aboukhadijeh */ module.exports = runParallel +const queueMicrotask = __nccwpck_require__(9795) + function runParallel (tasks, cb) { - var results, pending, keys - var isSync = true + let results, pending, keys + let isSync = true if (Array.isArray(tasks)) { results = [] @@ -25522,7 +24841,7 @@ function runParallel (tasks, cb) { if (cb) cb(err, results) cb = null } - if (isSync) process.nextTick(end) + if (isSync) queueMicrotask(end) else end() } @@ -25719,7 +25038,7 @@ function runParallel (tasks, cb) { var Stream try { - Stream = __nccwpck_require__(2413).Stream + Stream = (__nccwpck_require__(2781).Stream) } catch (ex) { Stream = function () {} } @@ -25789,7 +25108,7 @@ function runParallel (tasks, cb) { typeof Buffer.isBuffer === 'function' && Buffer.isBuffer(data)) { if (!this._decoder) { - var SD = __nccwpck_require__(4304).StringDecoder + var SD = (__nccwpck_require__(1576).StringDecoder) this._decoder = new SD('utf8') } data = this._decoder.write(data) @@ -27124,6 +26443,503 @@ function runParallel (tasks, cb) { })( false ? 0 : exports) +/***/ }), + +/***/ 1861: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*! 
+ * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. + */ + + + +const isNumber = __nccwpck_require__(5680); + +const toRegexRange = (min, max, options) => { + if (isNumber(min) === false) { + throw new TypeError('toRegexRange: expected the first argument to be a number'); + } + + if (max === void 0 || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new TypeError('toRegexRange: expected the second argument to be a number.'); + } + + let opts = { relaxZeros: true, ...options }; + if (typeof opts.strictZeros === 'boolean') { + opts.relaxZeros = opts.strictZeros === false; + } + + let relax = String(opts.relaxZeros); + let shorthand = String(opts.shorthand); + let capture = String(opts.capture); + let wrap = String(opts.wrap); + let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; + + if (toRegexRange.cache.hasOwnProperty(cacheKey)) { + return toRegexRange.cache[cacheKey].result; + } + + let a = Math.min(min, max); + let b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + let result = min + '|' + max; + if (opts.capture) { + return `(${result})`; + } + if (opts.wrap === false) { + return result; + } + return `(?:${result})`; + } + + let isPadded = hasPadding(min) || hasPadding(max); + let state = { min, max, a, b }; + let positives = []; + let negatives = []; + + if (isPadded) { + state.isPadded = isPadded; + state.maxLen = String(state.max).length; + } + + if (a < 0) { + let newMin = b < 0 ? Math.abs(b) : 1; + negatives = splitToPatterns(newMin, Math.abs(a), state, opts); + a = state.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, state, opts); + } + + state.negatives = negatives; + state.positives = positives; + state.result = collatePatterns(negatives, positives, opts); + + if (opts.capture === true) { + state.result = `(${state.result})`; + } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { + state.result = `(?:${state.result})`; + } + + toRegexRange.cache[cacheKey] = state; + return state.result; +}; + +function collatePatterns(neg, pos, options) { + let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + let intersected = filterPatterns(neg, pos, '-?', true, options) || []; + let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + let nines = 1; + let zeros = 1; + + let stop = countNines(min, nines); + let stops = new Set([max]); + + while (min <= stop && stop <= max) { + stops.add(stop); + nines += 1; + stop = countNines(min, nines); + } + + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops.add(stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops = [...stops]; + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return { pattern: start, count: [], digits: 0 }; + } + + let zipped = zip(start, stop); + let digits = zipped.length; + let pattern = ''; + let count = 0; + + for (let i = 0; i < digits; i++) { + let [startDigit, stopDigit] = zipped[i]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += 
toCharacterClass(startDigit, stopDigit, options); + + } else { + count++; + } + } + + if (count) { + pattern += options.shorthand === true ? '\\d' : '[0-9]'; + } + + return { pattern, count: [count], digits }; +} + +function splitToPatterns(min, max, tok, options) { + let ranges = splitToRanges(min, max); + let tokens = []; + let start = min; + let prev; + + for (let i = 0; i < ranges.length; i++) { + let max = ranges[i]; + let obj = rangeToPattern(String(start), String(max), options); + let zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.count.length > 1) { + prev.count.pop(); + } + + prev.count.push(obj.count[0]); + prev.string = prev.pattern + toQuantifier(prev.count); + start = max + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(max, tok, options); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.count); + tokens.push(obj); + start = max + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + let result = []; + + for (let ele of arr) { + let { string } = ele; + + // only push if _both_ are negative... + if (!intersection && !contains(comparison, 'string', string)) { + result.push(prefix + string); + } + + // or _both_ are positive + if (intersection && contains(comparison, 'string', string)) { + result.push(prefix + string); + } + } + return result; +} + +/** + * Zip strings + */ + +function zip(a, b) { + let arr = []; + for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function contains(arr, key, val) { + return arr.some(ele => ele[key] === val); +} + +function countNines(min, len) { + return Number(String(min).slice(0, -len) + '9'.repeat(len)); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + let [start = 0, stop = ''] = digits; + if (stop || start > 1) { + return `{${start + (stop ? ',' + stop : '')}}`; + } + return ''; +} + +function toCharacterClass(a, b, options) { + return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +} + +function hasPadding(str) { + return /^-?(0+)\d/.test(str); +} + +function padZeros(value, tok, options) { + if (!tok.isPadded) { + return value; + } + + let diff = Math.abs(tok.maxLen - String(value).length); + let relax = options.relaxZeros !== false; + + switch (diff) { + case 0: + return ''; + case 1: + return relax ? '0?' : '0'; + case 2: + return relax ? '0{0,2}' : '00'; + default: { + return relax ? 
`0{0,${diff}}` : `0{${diff}}`; + } + } +} + +/** + * Cache + */ + +toRegexRange.cache = {}; +toRegexRange.clearCache = () => (toRegexRange.cache = {}); + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; + + +/***/ }), + +/***/ 4256: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var punycode = __nccwpck_require__(5477); +var mappingTable = __nccwpck_require__(2020); + +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; + +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} + +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; + + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } + + return null; +} + +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} + +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; + + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } + + processed += String.fromCodePoint(codePoint); + break; + } + } + + return { + string: processed, + error: hasError + }; +} + +var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } + + var error = false; + + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } + + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } + + return { + label: label, + 
error: error + }; +} + +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); + + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } + + return { + string: labels.join("."), + error: result.error + }; +} + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); + + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } + + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } + + if (result.error) return null; + return labels.join("."); +}; + +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + + return { + domain: result.string, + error: result.error + }; +}; + +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; + + /***/ }), /***/ 4294: @@ -27140,13 +26956,13 @@ module.exports = __nccwpck_require__(4219); "use strict"; -var net = __nccwpck_require__(1631); -var tls = __nccwpck_require__(4016); -var http = __nccwpck_require__(8605); -var https = __nccwpck_require__(7211); -var events = __nccwpck_require__(8614); -var assert = __nccwpck_require__(2357); -var util = __nccwpck_require__(1669); +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); exports.httpOverHttp = httpOverHttp; @@ -27430,6 +27246,2613 @@ exports.getUserAgent = getUserAgent; //# sourceMappingURL=index.js.map +/***/ }), + +/***/ 5840: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return 
_parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(8628)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); + +var _nil = _interopRequireDefault(__nccwpck_require__(5332)); + +var _version = _interopRequireDefault(__nccwpck_require__(1595)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 4569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 5332: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 2746: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5274: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 8950: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 8628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _md = _interopRequireDefault(__nccwpck_require__(4569)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 5122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9120: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5274)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 1595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 4886: +/***/ ((module) => { + +"use strict"; + + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? 
-1 : 1; +} + +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } +} + +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; + } +} + +conversions["void"] = function () { + return undefined; +}; + +conversions["boolean"] = function (val) { + return !!val; +}; + +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); + +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); + +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); + +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); + +conversions["double"] = function (V) { + const x = +V; + + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } + + return x; +}; + +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + 
U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } + + return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; + + +/***/ }), + +/***/ 7537: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +const usm = __nccwpck_require__(2158); + +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + + // TODO: query stuff + } + + get href() { + return usm.serializeURL(this._url); + } + + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + } + + get origin() { + return usm.serializeURLOrigin(this._url); + } + + get protocol() { + return this._url.scheme + ":"; + } + + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } + + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setTheUsername(this._url, v); + } + + get password() { + return this._url.password; + } + + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; + } + + if (url.port === null) { + return usm.serializeHost(url.host); + } + + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } + + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) { + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } + } + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) { + return ""; + } + + return "/" + this._url.path.join("/"); + } + + set pathname(v) { + if (this._url.cannotBeABaseURL) 
{ + return; + } + + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); + } + + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" + this._url.query; + } + + set search(v) { + // TODO: query stuff + + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); + } + + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } + + return "#" + this._url.fragment; + } + + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); + } + + toJSON() { + return this.href; + } +}; + + +/***/ }), + +/***/ 3394: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const conversions = __nccwpck_require__(4886); +const utils = __nccwpck_require__(3185); +const Impl = __nccwpck_require__(7537); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); + } + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); + } + + module.exports.setup(this, args); +} + +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); + +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; + +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].password = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "host", { + get() { + return this[impl].host; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; + }, + enumerable: true, + configurable: true 
+}); + +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; + }, + enumerable: true, + configurable: true +}); + + +module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; + }, + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; + }, + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; + }, + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } + } +}; + + + +/***/ }), + +/***/ 8665: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +exports.URL = __nccwpck_require__(3394)["interface"]; +exports.serializeURL = __nccwpck_require__(2158).serializeURL; +exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin; +exports.basicURLParse = __nccwpck_require__(2158).basicURLParse; +exports.setTheUsername = __nccwpck_require__(2158).setTheUsername; +exports.setThePassword = __nccwpck_require__(2158).setThePassword; +exports.serializeHost = __nccwpck_require__(2158).serializeHost; +exports.serializeInteger = __nccwpck_require__(2158).serializeInteger; +exports.parseURL = __nccwpck_require__(2158).parseURL; + + +/***/ }), + +/***/ 2158: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const punycode = __nccwpck_require__(5477); +const tr46 = __nccwpck_require__(4256); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." + output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + ++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + 
const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); + } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + 
+ const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === "file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + 
this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if 
(this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + 
(!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. 
+ if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" + url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; + + +/***/ }), + +/***/ 3185: +/***/ ((module) => { + +"use strict"; + + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, 
keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + + + /***/ }), /***/ 2940: @@ -27502,7 +29925,7 @@ function wrappy (fn, cb) { builder = __nccwpck_require__(2958); - defaults = __nccwpck_require__(7251).defaults; + defaults = (__nccwpck_require__(7251).defaults); requiresCDATA = function(entry) { return typeof entry === "string" && (entry.indexOf('&') >= 0 || entry.indexOf('>') >= 0 || entry.indexOf('<') >= 0); @@ -27717,15 +30140,15 @@ function wrappy (fn, cb) { sax = __nccwpck_require__(2043); - events = __nccwpck_require__(8614); + events = __nccwpck_require__(2361); bom = __nccwpck_require__(2624); processors = __nccwpck_require__(9236); - setImmediate = __nccwpck_require__(8213).setImmediate; + setImmediate = (__nccwpck_require__(9512).setImmediate); - defaults = __nccwpck_require__(7251).defaults; + defaults = (__nccwpck_require__(7251).defaults); isEmpty = function(thing) { return typeof thing === "object" && (thing != null) && Object.keys(thing).length === 0; @@ -28906,7 +31329,7 @@ function wrappy (fn, cb) { extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, hasProp = {}.hasOwnProperty; - isObject = __nccwpck_require__(8229).isObject; + isObject = (__nccwpck_require__(8229).isObject); XMLNode = __nccwpck_require__(7608); @@ -29069,7 +31492,7 @@ function wrappy (fn, cb) { extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, hasProp = {}.hasOwnProperty; - isObject = __nccwpck_require__(8229).isObject; + isObject = (__nccwpck_require__(8229).isObject); XMLNode = __nccwpck_require__(7608); @@ -29119,7 +31542,7 @@ function wrappy (fn, cb) { extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, hasProp = {}.hasOwnProperty; - isObject = __nccwpck_require__(8229).isObject; + isObject = (__nccwpck_require__(8229).isObject); XMLNode = __nccwpck_require__(7608); @@ -29312,7 +31735,7 @@ function wrappy (fn, cb) { extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, hasProp = {}.hasOwnProperty; - isPlainObject = __nccwpck_require__(8229).isPlainObject; + isPlainObject = (__nccwpck_require__(8229).isPlainObject); XMLDOMImplementation = __nccwpck_require__(8310); @@ -31976,7 +34399,7 @@ function wrappy (fn, cb) { var NodeType, WriterState, XMLCData, XMLComment, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, 
XMLProcessingInstruction, XMLRaw, XMLText, XMLWriterBase, assign, hasProp = {}.hasOwnProperty; - assign = __nccwpck_require__(8229).assign; + assign = (__nccwpck_require__(8229).assign); NodeType = __nccwpck_require__(9267); @@ -32491,155 +34914,4834 @@ module.exports = eval("require")("original-fs"); /***/ }), -/***/ 2357: +/***/ 9491: /***/ ((module) => { "use strict"; -module.exports = require("assert");; +module.exports = require("assert"); /***/ }), -/***/ 4293: +/***/ 4300: /***/ ((module) => { "use strict"; -module.exports = require("buffer");; +module.exports = require("buffer"); /***/ }), -/***/ 3129: +/***/ 2081: /***/ ((module) => { "use strict"; -module.exports = require("child_process");; +module.exports = require("child_process"); /***/ }), -/***/ 881: +/***/ 6113: /***/ ((module) => { "use strict"; -module.exports = require("dns");; +module.exports = require("crypto"); /***/ }), -/***/ 8614: +/***/ 9523: /***/ ((module) => { "use strict"; -module.exports = require("events");; +module.exports = require("dns"); /***/ }), -/***/ 5747: +/***/ 2361: /***/ ((module) => { "use strict"; -module.exports = require("fs");; +module.exports = require("events"); /***/ }), -/***/ 8605: +/***/ 7147: /***/ ((module) => { "use strict"; -module.exports = require("http");; +module.exports = require("fs"); /***/ }), -/***/ 7565: +/***/ 3685: /***/ ((module) => { "use strict"; -module.exports = require("http2");; +module.exports = require("http"); /***/ }), -/***/ 7211: +/***/ 5158: /***/ ((module) => { "use strict"; -module.exports = require("https");; +module.exports = require("http2"); /***/ }), -/***/ 1631: +/***/ 5687: /***/ ((module) => { "use strict"; -module.exports = require("net");; +module.exports = require("https"); /***/ }), -/***/ 2087: +/***/ 1808: /***/ ((module) => { "use strict"; -module.exports = require("os");; +module.exports = require("net"); /***/ }), -/***/ 5622: +/***/ 2037: /***/ ((module) => { "use strict"; -module.exports = require("path");; +module.exports = require("os"); /***/ }), -/***/ 2413: +/***/ 1017: /***/ ((module) => { "use strict"; -module.exports = require("stream");; +module.exports = require("path"); /***/ }), -/***/ 4304: +/***/ 5477: /***/ ((module) => { "use strict"; -module.exports = require("string_decoder");; +module.exports = require("punycode"); /***/ }), -/***/ 8213: +/***/ 2781: /***/ ((module) => { "use strict"; -module.exports = require("timers");; +module.exports = require("stream"); /***/ }), -/***/ 4016: +/***/ 1576: /***/ ((module) => { "use strict"; -module.exports = require("tls");; +module.exports = require("string_decoder"); /***/ }), -/***/ 8835: +/***/ 9512: /***/ ((module) => { "use strict"; -module.exports = require("url");; +module.exports = require("timers"); /***/ }), -/***/ 1669: +/***/ 4404: /***/ ((module) => { "use strict"; -module.exports = require("util");; +module.exports = require("tls"); /***/ }), -/***/ 8761: +/***/ 7310: /***/ ((module) => { "use strict"; -module.exports = require("zlib");; +module.exports = require("url"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 9796: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }), + +/***/ 9859: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { + +"use strict"; +// ESM COMPAT FLAG +__nccwpck_require__.r(__webpack_exports__); + +// EXPORTS +__nccwpck_require__.d(__webpack_exports__, { + "AbortError": () => (/* reexport 
*/ AbortError), + "CacheError": () => (/* reexport */ CacheError), + "CancelError": () => (/* reexport */ types_CancelError), + "HTTPError": () => (/* reexport */ HTTPError), + "MaxRedirectsError": () => (/* reexport */ MaxRedirectsError), + "Options": () => (/* reexport */ Options), + "ParseError": () => (/* reexport */ ParseError), + "ReadError": () => (/* reexport */ ReadError), + "RequestError": () => (/* reexport */ RequestError), + "RetryError": () => (/* reexport */ RetryError), + "TimeoutError": () => (/* reexport */ TimeoutError), + "UploadError": () => (/* reexport */ UploadError), + "calculateRetryDelay": () => (/* reexport */ calculate_retry_delay), + "create": () => (/* reexport */ source_create), + "default": () => (/* binding */ got_dist_source), + "got": () => (/* binding */ got), + "isResponseOk": () => (/* reexport */ isResponseOk), + "parseBody": () => (/* reexport */ parseBody), + "parseLinkHeader": () => (/* reexport */ parseLinkHeader) +}); + +;// CONCATENATED MODULE: ./node_modules/@sindresorhus/is/dist/index.js +const typedArrayTypeNames = [ + 'Int8Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'Int16Array', + 'Uint16Array', + 'Int32Array', + 'Uint32Array', + 'Float32Array', + 'Float64Array', + 'BigInt64Array', + 'BigUint64Array', +]; +function isTypedArrayName(name) { + return typedArrayTypeNames.includes(name); +} +const objectTypeNames = [ + 'Function', + 'Generator', + 'AsyncGenerator', + 'GeneratorFunction', + 'AsyncGeneratorFunction', + 'AsyncFunction', + 'Observable', + 'Array', + 'Buffer', + 'Blob', + 'Object', + 'RegExp', + 'Date', + 'Error', + 'Map', + 'Set', + 'WeakMap', + 'WeakSet', + 'WeakRef', + 'ArrayBuffer', + 'SharedArrayBuffer', + 'DataView', + 'Promise', + 'URL', + 'FormData', + 'URLSearchParams', + 'HTMLElement', + 'NaN', + ...typedArrayTypeNames, +]; +function isObjectTypeName(name) { + return objectTypeNames.includes(name); +} +const primitiveTypeNames = [ + 'null', + 'undefined', + 'string', + 'number', + 'bigint', + 'boolean', + 'symbol', +]; +function isPrimitiveTypeName(name) { + return primitiveTypeNames.includes(name); +} +// eslint-disable-next-line @typescript-eslint/ban-types +function isOfType(type) { + return (value) => typeof value === type; +} +const { toString: dist_toString } = Object.prototype; +const getObjectType = (value) => { + const objectTypeName = dist_toString.call(value).slice(8, -1); + if (/HTML\w+Element/.test(objectTypeName) && is.domElement(value)) { + return 'HTMLElement'; + } + if (isObjectTypeName(objectTypeName)) { + return objectTypeName; + } + return undefined; +}; +const isObjectOfType = (type) => (value) => getObjectType(value) === type; +function is(value) { + if (value === null) { + return 'null'; + } + switch (typeof value) { + case 'undefined': + return 'undefined'; + case 'string': + return 'string'; + case 'number': + return Number.isNaN(value) ? 
'NaN' : 'number'; + case 'boolean': + return 'boolean'; + case 'function': + return 'Function'; + case 'bigint': + return 'bigint'; + case 'symbol': + return 'symbol'; + default: + } + if (is.observable(value)) { + return 'Observable'; + } + if (is.array(value)) { + return 'Array'; + } + if (is.buffer(value)) { + return 'Buffer'; + } + const tagType = getObjectType(value); + if (tagType) { + return tagType; + } + if (value instanceof String || value instanceof Boolean || value instanceof Number) { + throw new TypeError('Please don\'t use object wrappers for primitive types'); + } + return 'Object'; +} +is.undefined = isOfType('undefined'); +is.string = isOfType('string'); +const isNumberType = isOfType('number'); +is.number = (value) => isNumberType(value) && !is.nan(value); +is.bigint = isOfType('bigint'); +// eslint-disable-next-line @typescript-eslint/ban-types +is.function_ = isOfType('function'); +// eslint-disable-next-line @typescript-eslint/ban-types +is.null_ = (value) => value === null; +is.class_ = (value) => is.function_(value) && value.toString().startsWith('class '); +is.boolean = (value) => value === true || value === false; +is.symbol = isOfType('symbol'); +is.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value)); +is.array = (value, assertion) => { + if (!Array.isArray(value)) { + return false; + } + if (!is.function_(assertion)) { + return true; + } + return value.every(element => assertion(element)); +}; +// eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-call +is.buffer = (value) => value?.constructor?.isBuffer?.(value) ?? false; +is.blob = (value) => isObjectOfType('Blob')(value); +is.nullOrUndefined = (value) => is.null_(value) || is.undefined(value); // eslint-disable-line @typescript-eslint/ban-types +is.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value)); // eslint-disable-line @typescript-eslint/ban-types +is.iterable = (value) => is.function_(value?.[Symbol.iterator]); +is.asyncIterable = (value) => is.function_(value?.[Symbol.asyncIterator]); +is.generator = (value) => is.iterable(value) && is.function_(value?.next) && is.function_(value?.throw); +is.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw); +is.nativePromise = (value) => isObjectOfType('Promise')(value); +const hasPromiseApi = (value) => is.function_(value?.then) + && is.function_(value?.catch); +is.promise = (value) => is.nativePromise(value) || hasPromiseApi(value); +is.generatorFunction = isObjectOfType('GeneratorFunction'); +is.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction'; +is.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction'; +// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types +is.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype'); +is.regExp = isObjectOfType('RegExp'); +is.date = isObjectOfType('Date'); +is.error = isObjectOfType('Error'); +is.map = (value) => isObjectOfType('Map')(value); +is.set = (value) => isObjectOfType('Set')(value); +is.weakMap = (value) => isObjectOfType('WeakMap')(value); // eslint-disable-line @typescript-eslint/ban-types +is.weakSet = (value) => isObjectOfType('WeakSet')(value); // eslint-disable-line @typescript-eslint/ban-types +is.weakRef = (value) => isObjectOfType('WeakRef')(value); // eslint-disable-line @typescript-eslint/ban-types +is.int8Array 
= isObjectOfType('Int8Array'); +is.uint8Array = isObjectOfType('Uint8Array'); +is.uint8ClampedArray = isObjectOfType('Uint8ClampedArray'); +is.int16Array = isObjectOfType('Int16Array'); +is.uint16Array = isObjectOfType('Uint16Array'); +is.int32Array = isObjectOfType('Int32Array'); +is.uint32Array = isObjectOfType('Uint32Array'); +is.float32Array = isObjectOfType('Float32Array'); +is.float64Array = isObjectOfType('Float64Array'); +is.bigInt64Array = isObjectOfType('BigInt64Array'); +is.bigUint64Array = isObjectOfType('BigUint64Array'); +is.arrayBuffer = isObjectOfType('ArrayBuffer'); +is.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer'); +is.dataView = isObjectOfType('DataView'); +is.enumCase = (value, targetEnum) => Object.values(targetEnum).includes(value); +is.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype; +is.urlInstance = (value) => isObjectOfType('URL')(value); +is.urlString = (value) => { + if (!is.string(value)) { + return false; + } + try { + new URL(value); // eslint-disable-line no-new + return true; + } + catch { + return false; + } +}; +// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);` +is.truthy = (value) => Boolean(value); // eslint-disable-line unicorn/prefer-native-coercion-functions +// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);` +is.falsy = (value) => !value; +is.nan = (value) => Number.isNaN(value); +is.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value); +is.integer = (value) => Number.isInteger(value); +is.safeInteger = (value) => Number.isSafeInteger(value); +is.plainObject = (value) => { + // From: https://github.com/sindresorhus/is-plain-obj/blob/main/index.js + if (typeof value !== 'object' || value === null) { + return false; + } + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const prototype = Object.getPrototypeOf(value); + return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(Symbol.toStringTag in value) && !(Symbol.iterator in value); +}; +is.typedArray = (value) => isTypedArrayName(getObjectType(value)); +const isValidLength = (value) => is.safeInteger(value) && value >= 0; +is.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length); +is.inRange = (value, range) => { + if (is.number(range)) { + return value >= Math.min(0, range) && value <= Math.max(range, 0); + } + if (is.array(range) && range.length === 2) { + return value >= Math.min(...range) && value <= Math.max(...range); + } + throw new TypeError(`Invalid range: ${JSON.stringify(range)}`); +}; +// eslint-disable-next-line @typescript-eslint/naming-convention +const NODE_TYPE_ELEMENT = 1; +// eslint-disable-next-line @typescript-eslint/naming-convention +const DOM_PROPERTIES_TO_CHECK = [ + 'innerHTML', + 'ownerDocument', + 'style', + 'attributes', + 'nodeValue', +]; +is.domElement = (value) => is.object(value) + && value.nodeType === NODE_TYPE_ELEMENT + && is.string(value.nodeName) + && !is.plainObject(value) + && DOM_PROPERTIES_TO_CHECK.every(property => property in value); +is.observable = (value) => { + if (!value) { + return false; + } + // eslint-disable-next-line no-use-extend-native/no-use-extend-native, @typescript-eslint/no-unsafe-call + if (value === value[Symbol.observable]?.()) { + return true; + } + // eslint-disable-next-line 
@typescript-eslint/no-unsafe-call + if (value === value['@@observable']?.()) { + return true; + } + return false; +}; +is.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value); +is.infinite = (value) => value === Number.POSITIVE_INFINITY || value === Number.NEGATIVE_INFINITY; +const isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder; +is.evenInteger = isAbsoluteMod2(0); +is.oddInteger = isAbsoluteMod2(1); +is.emptyArray = (value) => is.array(value) && value.length === 0; +is.nonEmptyArray = (value) => is.array(value) && value.length > 0; +is.emptyString = (value) => is.string(value) && value.length === 0; +const isWhiteSpaceString = (value) => is.string(value) && !/\S/.test(value); +is.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value); +// TODO: Use `not ''` when the `not` operator is available. +is.nonEmptyString = (value) => is.string(value) && value.length > 0; +// TODO: Use `not ''` when the `not` operator is available. +is.nonEmptyStringAndNotWhitespace = (value) => is.string(value) && !is.emptyStringOrWhitespace(value); +// eslint-disable-next-line unicorn/no-array-callback-reference +is.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0; +// TODO: Use `not` operator here to remove `Map` and `Set` from type guard: +// - https://github.com/Microsoft/TypeScript/pull/29317 +// eslint-disable-next-line unicorn/no-array-callback-reference +is.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0; +is.emptySet = (value) => is.set(value) && value.size === 0; +is.nonEmptySet = (value) => is.set(value) && value.size > 0; +// eslint-disable-next-line unicorn/no-array-callback-reference +is.emptyMap = (value) => is.map(value) && value.size === 0; +// eslint-disable-next-line unicorn/no-array-callback-reference +is.nonEmptyMap = (value) => is.map(value) && value.size > 0; +// `PropertyKey` is any value that can be used as an object key (string, number, or symbol) +is.propertyKey = (value) => is.any([is.string, is.number, is.symbol], value); +is.formData = (value) => isObjectOfType('FormData')(value); +is.urlSearchParams = (value) => isObjectOfType('URLSearchParams')(value); +const predicateOnArray = (method, predicate, values) => { + if (!is.function_(predicate)) { + throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`); + } + if (values.length === 0) { + throw new TypeError('Invalid number of values'); + } + return method.call(values, predicate); +}; +is.any = (predicate, ...values) => { + const predicates = is.array(predicate) ? predicate : [predicate]; + return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values)); +}; +is.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values); +const assertType = (condition, description, value, options = {}) => { + if (!condition) { + const { multipleValues } = options; + const valuesMessage = multipleValues + ? `received values of types ${[ + ...new Set(value.map(singleValue => `\`${is(singleValue)}\``)), + ].join(', ')}` + : `received value of type \`${is(value)}\``; + throw new TypeError(`Expected value which is \`${description}\`, ${valuesMessage}.`); + } +}; +/* eslint-disable @typescript-eslint/no-confusing-void-expression */ +const assert = { + // Unknowns. 
+ undefined: (value) => assertType(is.undefined(value), 'undefined', value), + string: (value) => assertType(is.string(value), 'string', value), + number: (value) => assertType(is.number(value), 'number', value), + bigint: (value) => assertType(is.bigint(value), 'bigint', value), + // eslint-disable-next-line @typescript-eslint/ban-types + function_: (value) => assertType(is.function_(value), 'Function', value), + null_: (value) => assertType(is.null_(value), 'null', value), + class_: (value) => assertType(is.class_(value), "Class" /* AssertionTypeDescription.class_ */, value), + boolean: (value) => assertType(is.boolean(value), 'boolean', value), + symbol: (value) => assertType(is.symbol(value), 'symbol', value), + numericString: (value) => assertType(is.numericString(value), "string with a number" /* AssertionTypeDescription.numericString */, value), + array: (value, assertion) => { + const assert = assertType; + assert(is.array(value), 'Array', value); + if (assertion) { + // eslint-disable-next-line unicorn/no-array-for-each, unicorn/no-array-callback-reference + value.forEach(assertion); + } + }, + buffer: (value) => assertType(is.buffer(value), 'Buffer', value), + blob: (value) => assertType(is.blob(value), 'Blob', value), + nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), "null or undefined" /* AssertionTypeDescription.nullOrUndefined */, value), + object: (value) => assertType(is.object(value), 'Object', value), + iterable: (value) => assertType(is.iterable(value), "Iterable" /* AssertionTypeDescription.iterable */, value), + asyncIterable: (value) => assertType(is.asyncIterable(value), "AsyncIterable" /* AssertionTypeDescription.asyncIterable */, value), + generator: (value) => assertType(is.generator(value), 'Generator', value), + asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value), + nativePromise: (value) => assertType(is.nativePromise(value), "native Promise" /* AssertionTypeDescription.nativePromise */, value), + promise: (value) => assertType(is.promise(value), 'Promise', value), + generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value), + asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value), + // eslint-disable-next-line @typescript-eslint/ban-types + asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value), + // eslint-disable-next-line @typescript-eslint/ban-types + boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value), + regExp: (value) => assertType(is.regExp(value), 'RegExp', value), + date: (value) => assertType(is.date(value), 'Date', value), + error: (value) => assertType(is.error(value), 'Error', value), + map: (value) => assertType(is.map(value), 'Map', value), + set: (value) => assertType(is.set(value), 'Set', value), + weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value), + weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value), + weakRef: (value) => assertType(is.weakRef(value), 'WeakRef', value), + int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value), + uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value), + uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value), + int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', value), + uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value), + int32Array: 
(value) => assertType(is.int32Array(value), 'Int32Array', value), + uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value), + float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value), + float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value), + bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value), + bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value), + arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value), + sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value), + dataView: (value) => assertType(is.dataView(value), 'DataView', value), + enumCase: (value, targetEnum) => assertType(is.enumCase(value, targetEnum), 'EnumCase', value), + urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value), + urlString: (value) => assertType(is.urlString(value), "string with a URL" /* AssertionTypeDescription.urlString */, value), + truthy: (value) => assertType(is.truthy(value), "truthy" /* AssertionTypeDescription.truthy */, value), + falsy: (value) => assertType(is.falsy(value), "falsy" /* AssertionTypeDescription.falsy */, value), + nan: (value) => assertType(is.nan(value), "NaN" /* AssertionTypeDescription.nan */, value), + primitive: (value) => assertType(is.primitive(value), "primitive" /* AssertionTypeDescription.primitive */, value), + integer: (value) => assertType(is.integer(value), "integer" /* AssertionTypeDescription.integer */, value), + safeInteger: (value) => assertType(is.safeInteger(value), "integer" /* AssertionTypeDescription.safeInteger */, value), + plainObject: (value) => assertType(is.plainObject(value), "plain object" /* AssertionTypeDescription.plainObject */, value), + typedArray: (value) => assertType(is.typedArray(value), "TypedArray" /* AssertionTypeDescription.typedArray */, value), + arrayLike: (value) => assertType(is.arrayLike(value), "array-like" /* AssertionTypeDescription.arrayLike */, value), + domElement: (value) => assertType(is.domElement(value), "HTMLElement" /* AssertionTypeDescription.domElement */, value), + observable: (value) => assertType(is.observable(value), 'Observable', value), + nodeStream: (value) => assertType(is.nodeStream(value), "Node.js Stream" /* AssertionTypeDescription.nodeStream */, value), + infinite: (value) => assertType(is.infinite(value), "infinite number" /* AssertionTypeDescription.infinite */, value), + emptyArray: (value) => assertType(is.emptyArray(value), "empty array" /* AssertionTypeDescription.emptyArray */, value), + nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), "non-empty array" /* AssertionTypeDescription.nonEmptyArray */, value), + emptyString: (value) => assertType(is.emptyString(value), "empty string" /* AssertionTypeDescription.emptyString */, value), + emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), "empty string or whitespace" /* AssertionTypeDescription.emptyStringOrWhitespace */, value), + nonEmptyString: (value) => assertType(is.nonEmptyString(value), "non-empty string" /* AssertionTypeDescription.nonEmptyString */, value), + nonEmptyStringAndNotWhitespace: (value) => assertType(is.nonEmptyStringAndNotWhitespace(value), "non-empty string and not whitespace" /* AssertionTypeDescription.nonEmptyStringAndNotWhitespace */, value), + emptyObject: (value) => assertType(is.emptyObject(value), "empty object" /* AssertionTypeDescription.emptyObject 
*/, value), + nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), "non-empty object" /* AssertionTypeDescription.nonEmptyObject */, value), + emptySet: (value) => assertType(is.emptySet(value), "empty set" /* AssertionTypeDescription.emptySet */, value), + nonEmptySet: (value) => assertType(is.nonEmptySet(value), "non-empty set" /* AssertionTypeDescription.nonEmptySet */, value), + emptyMap: (value) => assertType(is.emptyMap(value), "empty map" /* AssertionTypeDescription.emptyMap */, value), + nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), "non-empty map" /* AssertionTypeDescription.nonEmptyMap */, value), + propertyKey: (value) => assertType(is.propertyKey(value), 'PropertyKey', value), + formData: (value) => assertType(is.formData(value), 'FormData', value), + urlSearchParams: (value) => assertType(is.urlSearchParams(value), 'URLSearchParams', value), + // Numbers. + evenInteger: (value) => assertType(is.evenInteger(value), "even integer" /* AssertionTypeDescription.evenInteger */, value), + oddInteger: (value) => assertType(is.oddInteger(value), "odd integer" /* AssertionTypeDescription.oddInteger */, value), + // Two arguments. + directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), "T" /* AssertionTypeDescription.directInstanceOf */, instance), + inRange: (value, range) => assertType(is.inRange(value, range), "in range" /* AssertionTypeDescription.inRange */, value), + // Variadic functions. + any: (predicate, ...values) => assertType(is.any(predicate, ...values), "predicate returns truthy for any value" /* AssertionTypeDescription.any */, values, { multipleValues: true }), + all: (predicate, ...values) => assertType(is.all(predicate, ...values), "predicate returns truthy for all values" /* AssertionTypeDescription.all */, values, { multipleValues: true }), +}; +/* eslint-enable @typescript-eslint/no-confusing-void-expression */ +// Some few keywords are reserved, but we'll populate them for Node.js users +// See https://github.com/Microsoft/TypeScript/issues/2536 +Object.defineProperties(is, { + class: { + value: is.class_, + }, + function: { + value: is.function_, + }, + null: { + value: is.null_, + }, +}); +Object.defineProperties(assert, { + class: { + value: assert.class_, + }, + function: { + value: assert.function_, + }, + null: { + value: assert.null_, + }, +}); +/* harmony default export */ const dist = (is); + +;// CONCATENATED MODULE: external "node:events" +const external_node_events_namespaceObject = require("node:events"); +;// CONCATENATED MODULE: ./node_modules/p-cancelable/index.js +class CancelError extends Error { + constructor(reason) { + super(reason || 'Promise was canceled'); + this.name = 'CancelError'; + } + + get isCanceled() { + return true; + } +} + +// TODO: Use private class fields when ESLint 8 is out. 
+ +class PCancelable { + static fn(userFunction) { + return (...arguments_) => { + return new PCancelable((resolve, reject, onCancel) => { + arguments_.push(onCancel); + // eslint-disable-next-line promise/prefer-await-to-then + userFunction(...arguments_).then(resolve, reject); + }); + }; + } + + constructor(executor) { + this._cancelHandlers = []; + this._isPending = true; + this._isCanceled = false; + this._rejectOnCancel = true; + + this._promise = new Promise((resolve, reject) => { + this._reject = reject; + + const onResolve = value => { + if (!this._isCanceled || !onCancel.shouldReject) { + this._isPending = false; + resolve(value); + } + }; + + const onReject = error => { + this._isPending = false; + reject(error); + }; + + const onCancel = handler => { + if (!this._isPending) { + throw new Error('The `onCancel` handler was attached after the promise settled.'); + } + + this._cancelHandlers.push(handler); + }; + + Object.defineProperties(onCancel, { + shouldReject: { + get: () => this._rejectOnCancel, + set: boolean => { + this._rejectOnCancel = boolean; + } + } + }); + + executor(onResolve, onReject, onCancel); + }); + } + + then(onFulfilled, onRejected) { + // eslint-disable-next-line promise/prefer-await-to-then + return this._promise.then(onFulfilled, onRejected); + } + + catch(onRejected) { + // eslint-disable-next-line promise/prefer-await-to-then + return this._promise.catch(onRejected); + } + + finally(onFinally) { + // eslint-disable-next-line promise/prefer-await-to-then + return this._promise.finally(onFinally); + } + + cancel(reason) { + if (!this._isPending || this._isCanceled) { + return; + } + + this._isCanceled = true; + + if (this._cancelHandlers.length > 0) { + try { + for (const handler of this._cancelHandlers) { + handler(); + } + } catch (error) { + this._reject(error); + return; + } + } + + if (this._rejectOnCancel) { + this._reject(new CancelError(reason)); + } + } + + get isCanceled() { + return this._isCanceled; + } +} + +Object.setPrototypeOf(PCancelable.prototype, Promise.prototype); + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/errors.js + +// A hacky check to prevent circular references. +function isRequest(x) { + return dist.object(x) && '_onResponse' in x; +} +/** +An error to be thrown when a request fails. +Contains a `code` property with error class code, like `ECONNREFUSED`. +*/ +class RequestError extends Error { + constructor(message, error, self) { + super(message); + Object.defineProperty(this, "input", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "code", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "stack", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "response", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "request", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "timings", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Error.captureStackTrace(this, this.constructor); + this.name = 'RequestError'; + this.code = error.code ?? 
'ERR_GOT_REQUEST_ERROR'; + this.input = error.input; + if (isRequest(self)) { + Object.defineProperty(this, 'request', { + enumerable: false, + value: self, + }); + Object.defineProperty(this, 'response', { + enumerable: false, + value: self.response, + }); + this.options = self.options; + } + else { + this.options = self; + } + this.timings = this.request?.timings; + // Recover the original stacktrace + if (dist.string(error.stack) && dist.string(this.stack)) { + const indexOfMessage = this.stack.indexOf(this.message) + this.message.length; + const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse(); + const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse(); + // Remove duplicated traces + while (errorStackTrace.length > 0 && errorStackTrace[0] === thisStackTrace[0]) { + thisStackTrace.shift(); + } + this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`; + } + } +} +/** +An error to be thrown when the server redirects you more than ten times. +Includes a `response` property. +*/ +class MaxRedirectsError extends RequestError { + constructor(request) { + super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request); + this.name = 'MaxRedirectsError'; + this.code = 'ERR_TOO_MANY_REDIRECTS'; + } +} +/** +An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304. +Includes a `response` property. +*/ +// eslint-disable-next-line @typescript-eslint/naming-convention +class HTTPError extends RequestError { + constructor(response) { + super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request); + this.name = 'HTTPError'; + this.code = 'ERR_NON_2XX_3XX_RESPONSE'; + } +} +/** +An error to be thrown when a cache method fails. +For example, if the database goes down or there's a filesystem error. +*/ +class CacheError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'CacheError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_CACHE_ACCESS' : this.code; + } +} +/** +An error to be thrown when the request body is a stream and an error occurs while reading from that stream. +*/ +class UploadError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'UploadError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_UPLOAD' : this.code; + } +} +/** +An error to be thrown when the request is aborted due to a timeout. +Includes an `event` and `timings` property. +*/ +class TimeoutError extends RequestError { + constructor(error, timings, request) { + super(error.message, error, request); + Object.defineProperty(this, "timings", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "event", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + this.name = 'TimeoutError'; + this.event = error.event; + this.timings = timings; + } +} +/** +An error to be thrown when reading from response stream fails. +*/ +class ReadError extends RequestError { + constructor(error, request) { + super(error.message, error, request); + this.name = 'ReadError'; + this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 
'ERR_READING_RESPONSE_STREAM' : this.code; + } +} +/** +An error which always triggers a new retry when thrown. +*/ +class RetryError extends RequestError { + constructor(request) { + super('Retrying', {}, request); + this.name = 'RetryError'; + this.code = 'ERR_RETRYING'; + } +} +/** +An error to be thrown when the request is aborted by AbortController. +*/ +class AbortError extends RequestError { + constructor(request) { + super('This operation was aborted.', {}, request); + this.code = 'ERR_ABORTED'; + this.name = 'AbortError'; + } +} + +;// CONCATENATED MODULE: external "node:process" +const external_node_process_namespaceObject = require("node:process"); +;// CONCATENATED MODULE: external "node:buffer" +const external_node_buffer_namespaceObject = require("node:buffer"); +;// CONCATENATED MODULE: external "node:stream" +const external_node_stream_namespaceObject = require("node:stream"); +;// CONCATENATED MODULE: external "node:url" +const external_node_url_namespaceObject = require("node:url"); +;// CONCATENATED MODULE: external "node:http" +const external_node_http_namespaceObject = require("node:http"); +// EXTERNAL MODULE: external "events" +var external_events_ = __nccwpck_require__(2361); +// EXTERNAL MODULE: external "util" +var external_util_ = __nccwpck_require__(3837); +// EXTERNAL MODULE: ./node_modules/defer-to-connect/dist/source/index.js +var source = __nccwpck_require__(6214); +;// CONCATENATED MODULE: ./node_modules/@szmarczak/http-timer/dist/source/index.js + + + +const timer = (request) => { + if (request.timings) { + return request.timings; + } + const timings = { + start: Date.now(), + socket: undefined, + lookup: undefined, + connect: undefined, + secureConnect: undefined, + upload: undefined, + response: undefined, + end: undefined, + error: undefined, + abort: undefined, + phases: { + wait: undefined, + dns: undefined, + tcp: undefined, + tls: undefined, + request: undefined, + firstByte: undefined, + download: undefined, + total: undefined, + }, + }; + request.timings = timings; + const handleError = (origin) => { + origin.once(external_events_.errorMonitor, () => { + timings.error = Date.now(); + timings.phases.total = timings.error - timings.start; + }); + }; + handleError(request); + const onAbort = () => { + timings.abort = Date.now(); + timings.phases.total = timings.abort - timings.start; + }; + request.prependOnceListener('abort', onAbort); + const onSocket = (socket) => { + timings.socket = Date.now(); + timings.phases.wait = timings.socket - timings.start; + if (external_util_.types.isProxy(socket)) { + return; + } + const lookupListener = () => { + timings.lookup = Date.now(); + timings.phases.dns = timings.lookup - timings.socket; + }; + socket.prependOnceListener('lookup', lookupListener); + source(socket, { + connect: () => { + timings.connect = Date.now(); + if (timings.lookup === undefined) { + socket.removeListener('lookup', lookupListener); + timings.lookup = timings.connect; + timings.phases.dns = timings.lookup - timings.socket; + } + timings.phases.tcp = timings.connect - timings.lookup; + }, + secureConnect: () => { + timings.secureConnect = Date.now(); + timings.phases.tls = timings.secureConnect - timings.connect; + }, + }); + }; + if (request.socket) { + onSocket(request.socket); + } + else { + request.prependOnceListener('socket', onSocket); + } + const onUpload = () => { + timings.upload = Date.now(); + timings.phases.request = timings.upload - (timings.secureConnect ?? 
timings.connect); + }; + if (request.writableFinished) { + onUpload(); + } + else { + request.prependOnceListener('finish', onUpload); + } + request.prependOnceListener('response', (response) => { + timings.response = Date.now(); + timings.phases.firstByte = timings.response - timings.upload; + response.timings = timings; + handleError(response); + response.prependOnceListener('end', () => { + request.off('abort', onAbort); + response.off('aborted', onAbort); + if (timings.phases.total) { + // Aborted or errored + return; + } + timings.end = Date.now(); + timings.phases.download = timings.end - timings.response; + timings.phases.total = timings.end - timings.start; + }); + response.prependOnceListener('aborted', onAbort); + }); + return timings; +}; +/* harmony default export */ const dist_source = (timer); + +// EXTERNAL MODULE: ./node_modules/cacheable-request/src/index.js +var src = __nccwpck_require__(8116); +// EXTERNAL MODULE: ./node_modules/decompress-response/index.js +var decompress_response = __nccwpck_require__(2391); +// EXTERNAL MODULE: ./node_modules/get-stream/index.js +var get_stream = __nccwpck_require__(1766); +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/isFunction.js +const isFunction = (value) => (typeof value === "function"); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/isFormData.js + +const isFormData = (value) => Boolean(value + && isFunction(value.constructor) + && value[Symbol.toStringTag] === "FormData" + && isFunction(value.append) + && isFunction(value.getAll) + && isFunction(value.entries) + && isFunction(value[Symbol.iterator])); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/createBoundary.js +const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"; +function createBoundary() { + let size = 16; + let res = ""; + while (size--) { + res += alphabet[(Math.random() * alphabet.length) << 0]; + } + return res; +} + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/normalizeValue.js +const normalizeValue = (value) => String(value) + .replace(/\r|\n/g, (match, i, str) => { + if ((match === "\r" && str[i + 1] !== "\n") + || (match === "\n" && str[i - 1] !== "\r")) { + return "\r\n"; + } + return match; +}); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/isPlainObject.js +const getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase()); +function isPlainObject(value) { + if (getType(value) !== "object") { + return false; + } + const pp = Object.getPrototypeOf(value); + if (pp === null || pp === undefined) { + return true; + } + const Ctor = pp.constructor && pp.constructor.toString(); + return Ctor === Object.toString(); +} + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/proxyHeaders.js +function getProperty(target, prop) { + if (typeof prop !== "string") { + return target[prop]; + } + for (const [name, value] of Object.entries(target)) { + if (prop.toLowerCase() === name.toLowerCase()) { + return value; + } + } + return undefined; +} +const proxyHeaders = (object) => new Proxy(object, { + get: (target, prop) => getProperty(target, prop), + has: (target, prop) => getProperty(target, prop) !== undefined +}); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/escapeName.js +const escapeName = (name) => String(name) + .replace(/\r/g, "%0D") + .replace(/\n/g, "%0A") + .replace(/"/g, "%22"); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/util/isFile.js + +const isFile = 
(value) => Boolean(value + && typeof value === "object" + && isFunction(value.constructor) + && value[Symbol.toStringTag] === "File" + && isFunction(value.stream) + && value.name != null); +const isFileLike = (/* unused pure expression or super */ null && (isFile)); + +;// CONCATENATED MODULE: ./node_modules/form-data-encoder/lib/FormDataEncoder.js +var __classPrivateFieldSet = (undefined && undefined.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader, _FormDataEncoder_getContentLength; + + + + + + + +const defaultOptions = { + enableAdditionalHeaders: false +}; +const readonlyProp = { writable: false, configurable: false }; +class FormDataEncoder { + constructor(form, boundaryOrOptions, options) { + _FormDataEncoder_instances.add(this); + _FormDataEncoder_CRLF.set(this, "\r\n"); + _FormDataEncoder_CRLF_BYTES.set(this, void 0); + _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0); + _FormDataEncoder_DASHES.set(this, "-".repeat(2)); + _FormDataEncoder_encoder.set(this, new TextEncoder()); + _FormDataEncoder_footer.set(this, void 0); + _FormDataEncoder_form.set(this, void 0); + _FormDataEncoder_options.set(this, void 0); + if (!isFormData(form)) { + throw new TypeError("Expected first argument to be a FormData instance."); + } + let boundary; + if (isPlainObject(boundaryOrOptions)) { + options = boundaryOrOptions; + } + else { + boundary = boundaryOrOptions; + } + if (!boundary) { + boundary = createBoundary(); + } + if (typeof boundary !== "string") { + throw new TypeError("Expected boundary argument to be a string."); + } + if (options && !isPlainObject(options)) { + throw new TypeError("Expected options argument to be an object."); + } + __classPrivateFieldSet(this, _FormDataEncoder_form, Array.from(form.entries()), "f"); + __classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")), "f"); + __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f").byteLength, "f"); + this.boundary = `form-data-boundary-${boundary}`; + this.contentType = `multipart/form-data; 
boundary=${this.boundary}`; + __classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`), "f"); + const headers = { + "Content-Type": this.contentType + }; + const contentLength = __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getContentLength).call(this); + if (contentLength) { + this.contentLength = contentLength; + headers["Content-Length"] = contentLength; + } + this.headers = proxyHeaders(Object.freeze(headers)); + Object.defineProperties(this, { + boundary: readonlyProp, + contentType: readonlyProp, + contentLength: readonlyProp, + headers: readonlyProp + }); + } + getContentLength() { + return this.contentLength == null ? undefined : Number(this.contentLength); + } + *values() { + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { + const value = isFile(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalizeValue(raw)); + yield __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value); + yield value; + yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, "f"); + } + yield __classPrivateFieldGet(this, _FormDataEncoder_footer, "f"); + } + async *encode() { + for (const part of this.values()) { + if (isFile(part)) { + yield* part.stream(); + } + else { + yield part; + } + } + } + [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) { + let header = ""; + header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, "f")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Disposition: form-data; name="${escapeName(name)}"`; + if (isFile(value)) { + header += `; filename="${escapeName(value.name)}"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}`; + header += `Content-Type: ${value.type || "application/octet-stream"}`; + } + const size = isFile(value) ? value.size : value.byteLength; + if (__classPrivateFieldGet(this, _FormDataEncoder_options, "f").enableAdditionalHeaders === true + && size != null + && !isNaN(size)) { + header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f")}Content-Length: ${isFile(value) ? value.size : value.byteLength}`; + } + return __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, "f").repeat(2)}`); + }, _FormDataEncoder_getContentLength = function _FormDataEncoder_getContentLength() { + let length = 0; + for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, "f")) { + const value = isFile(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, "f").encode(normalizeValue(raw)); + const size = isFile(value) ? 
value.size : value.byteLength; + if (size == null || isNaN(size)) { + return undefined; + } + length += __classPrivateFieldGet(this, _FormDataEncoder_instances, "m", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength; + length += size; + length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, "f"); + } + return String(length + __classPrivateFieldGet(this, _FormDataEncoder_footer, "f").byteLength); + }, Symbol.iterator)]() { + return this.values(); + } + [Symbol.asyncIterator]() { + return this.encode(); + } +} + +;// CONCATENATED MODULE: external "node:util" +const external_node_util_namespaceObject = require("node:util"); +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/is-form-data.js + +function is_form_data_isFormData(body) { + return dist.nodeStream(body) && dist.function_(body.getBoundary); +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/get-body-size.js + + + + +async function getBodySize(body, headers) { + if (headers && 'content-length' in headers) { + return Number(headers['content-length']); + } + if (!body) { + return 0; + } + if (dist.string(body)) { + return external_node_buffer_namespaceObject.Buffer.byteLength(body); + } + if (dist.buffer(body)) { + return body.length; + } + if (is_form_data_isFormData(body)) { + return (0,external_node_util_namespaceObject.promisify)(body.getLength.bind(body))(); + } + return undefined; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/proxy-events.js +function proxyEvents(from, to, events) { + const eventFunctions = {}; + for (const event of events) { + const eventFunction = (...args) => { + to.emit(event, ...args); + }; + eventFunctions[event] = eventFunction; + from.on(event, eventFunction); + } + return () => { + for (const [event, eventFunction] of Object.entries(eventFunctions)) { + from.off(event, eventFunction); + } + }; +} + +;// CONCATENATED MODULE: external "node:net" +const external_node_net_namespaceObject = require("node:net"); +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/unhandle.js +// When attaching listeners, it's very easy to forget about them. +// Especially if you do error handling and set timeouts. +// So instead of checking if it's proper to throw an error on every timeout ever, +// use this simple tool which will remove all listeners you have attached. 
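+// A minimal usage sketch of `unhandle()` (illustrative only; `emitter` and `handler` are placeholders):
+//
+//   const { once, unhandleAll } = unhandle();
+//   once(emitter, 'response', handler); // attach and remember the listener
+//   // ...later, e.g. once every timeout has been cancelled:
+//   unhandleAll();                      // removes every listener registered via `once`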
+function unhandle() { + const handlers = []; + return { + once(origin, event, fn) { + origin.once(event, fn); + handlers.push({ origin, event, fn }); + }, + unhandleAll() { + for (const handler of handlers) { + const { origin, event, fn } = handler; + origin.removeListener(event, fn); + } + handlers.length = 0; + }, + }; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/timed-out.js + + +const reentry = Symbol('reentry'); +const noop = () => { }; +class timed_out_TimeoutError extends Error { + constructor(threshold, event) { + super(`Timeout awaiting '${event}' for ${threshold}ms`); + Object.defineProperty(this, "event", { + enumerable: true, + configurable: true, + writable: true, + value: event + }); + Object.defineProperty(this, "code", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + this.name = 'TimeoutError'; + this.code = 'ETIMEDOUT'; + } +} +function timedOut(request, delays, options) { + if (reentry in request) { + return noop; + } + request[reentry] = true; + const cancelers = []; + const { once, unhandleAll } = unhandle(); + const addTimeout = (delay, callback, event) => { + const timeout = setTimeout(callback, delay, delay, event); + timeout.unref?.(); + const cancel = () => { + clearTimeout(timeout); + }; + cancelers.push(cancel); + return cancel; + }; + const { host, hostname } = options; + const timeoutHandler = (delay, event) => { + request.destroy(new timed_out_TimeoutError(delay, event)); + }; + const cancelTimeouts = () => { + for (const cancel of cancelers) { + cancel(); + } + unhandleAll(); + }; + request.once('error', error => { + cancelTimeouts(); + // Save original behavior + /* istanbul ignore next */ + if (request.listenerCount('error') === 0) { + throw error; + } + }); + if (typeof delays.request !== 'undefined') { + const cancelTimeout = addTimeout(delays.request, timeoutHandler, 'request'); + once(request, 'response', (response) => { + once(response, 'end', cancelTimeout); + }); + } + if (typeof delays.socket !== 'undefined') { + const { socket } = delays; + const socketTimeoutHandler = () => { + timeoutHandler(socket, 'socket'); + }; + request.setTimeout(socket, socketTimeoutHandler); + // `request.setTimeout(0)` causes a memory leak. + // We can just remove the listener and forget about the timer - it's unreffed. + // See https://github.com/sindresorhus/got/issues/690 + cancelers.push(() => { + request.removeListener('timeout', socketTimeoutHandler); + }); + } + const hasLookup = typeof delays.lookup !== 'undefined'; + const hasConnect = typeof delays.connect !== 'undefined'; + const hasSecureConnect = typeof delays.secureConnect !== 'undefined'; + const hasSend = typeof delays.send !== 'undefined'; + if (hasLookup || hasConnect || hasSecureConnect || hasSend) { + once(request, 'socket', (socket) => { + const { socketPath } = request; + /* istanbul ignore next: hard to test */ + if (socket.connecting) { + const hasPath = Boolean(socketPath ?? external_node_net_namespaceObject.isIP(hostname ?? host ?? 
'') !== 0); + if (hasLookup && !hasPath && typeof socket.address().address === 'undefined') { + const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup'); + once(socket, 'lookup', cancelTimeout); + } + if (hasConnect) { + const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect'); + if (hasPath) { + once(socket, 'connect', timeConnect()); + } + else { + once(socket, 'lookup', (error) => { + if (error === null) { + once(socket, 'connect', timeConnect()); + } + }); + } + } + if (hasSecureConnect && options.protocol === 'https:') { + once(socket, 'connect', () => { + const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect'); + once(socket, 'secureConnect', cancelTimeout); + }); + } + } + if (hasSend) { + const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send'); + /* istanbul ignore next: hard to test */ + if (socket.connecting) { + once(socket, 'connect', () => { + once(request, 'upload-complete', timeRequest()); + }); + } + else { + once(request, 'upload-complete', timeRequest()); + } + } + }); + } + if (typeof delays.response !== 'undefined') { + once(request, 'upload-complete', () => { + const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response'); + once(request, 'response', cancelTimeout); + }); + } + if (typeof delays.read !== 'undefined') { + once(request, 'response', (response) => { + const cancelTimeout = addTimeout(delays.read, timeoutHandler, 'read'); + once(response, 'end', cancelTimeout); + }); + } + return cancelTimeouts; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/url-to-options.js + +function urlToOptions(url) { + // Cast to URL + url = url; + const options = { + protocol: url.protocol, + hostname: dist.string(url.hostname) && url.hostname.startsWith('[') ? 
url.hostname.slice(1, -1) : url.hostname, + host: url.host, + hash: url.hash, + search: url.search, + pathname: url.pathname, + href: url.href, + path: `${url.pathname || ''}${url.search || ''}`, + }; + if (dist.string(url.port) && url.port.length > 0) { + options.port = Number(url.port); + } + if (url.username || url.password) { + options.auth = `${url.username || ''}:${url.password || ''}`; + } + return options; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/weakable-map.js +class WeakableMap { + constructor() { + Object.defineProperty(this, "weakMap", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "map", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + this.weakMap = new WeakMap(); + this.map = new Map(); + } + set(key, value) { + if (typeof key === 'object') { + this.weakMap.set(key, value); + } + else { + this.map.set(key, value); + } + } + get(key) { + if (typeof key === 'object') { + return this.weakMap.get(key); + } + return this.map.get(key); + } + has(key) { + if (typeof key === 'object') { + return this.weakMap.has(key); + } + return this.map.has(key); + } +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/calculate-retry-delay.js +const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter, computedValue, }) => { + if (error.name === 'RetryError') { + return 1; + } + if (attemptCount > retryOptions.limit) { + return 0; + } + const hasMethod = retryOptions.methods.includes(error.options.method); + const hasErrorCode = retryOptions.errorCodes.includes(error.code); + const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode); + if (!hasMethod || (!hasErrorCode && !hasStatusCode)) { + return 0; + } + if (error.response) { + if (retryAfter) { + // In this case `computedValue` is `options.request.timeout` + if (retryAfter > computedValue) { + return 0; + } + return retryAfter; + } + if (error.response.statusCode === 413) { + return 0; + } + } + const noise = Math.random() * retryOptions.noise; + return Math.min(((2 ** (attemptCount - 1)) * 1000), retryOptions.backoffLimit) + noise; +}; +/* harmony default export */ const calculate_retry_delay = (calculateRetryDelay); + +;// CONCATENATED MODULE: external "node:tls" +const external_node_tls_namespaceObject = require("node:tls"); +;// CONCATENATED MODULE: external "node:https" +const external_node_https_namespaceObject = require("node:https"); +;// CONCATENATED MODULE: ./node_modules/lowercase-keys/index.js +function lowercaseKeys(object) { + return Object.fromEntries(Object.entries(object).map(([key, value]) => [key.toLowerCase(), value])); +} + +// EXTERNAL MODULE: ./node_modules/cacheable-lookup/source/index.js +var cacheable_lookup_source = __nccwpck_require__(2286); +// EXTERNAL MODULE: ./node_modules/http2-wrapper/source/index.js +var http2_wrapper_source = __nccwpck_require__(4645); +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/parse-link-header.js +function parseLinkHeader(link) { + const parsed = []; + const items = link.split(','); + for (const item of items) { + // https://tools.ietf.org/html/rfc5988#section-5 + const [rawUriReference, ...rawLinkParameters] = item.split(';'); + const trimmedUriReference = rawUriReference.trim(); + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (trimmedUriReference[0] !== '<' || trimmedUriReference[trimmedUriReference.length - 1] !== '>') { 
+ throw new Error(`Invalid format of the Link header reference: ${trimmedUriReference}`); + } + const reference = trimmedUriReference.slice(1, -1); + const parameters = {}; + if (rawLinkParameters.length === 0) { + throw new Error(`Unexpected end of Link header parameters: ${rawLinkParameters.join(';')}`); + } + for (const rawParameter of rawLinkParameters) { + const trimmedRawParameter = rawParameter.trim(); + const center = trimmedRawParameter.indexOf('='); + if (center === -1) { + throw new Error(`Failed to parse Link header: ${link}`); + } + const name = trimmedRawParameter.slice(0, center).trim(); + const value = trimmedRawParameter.slice(center + 1).trim(); + parameters[name] = value; + } + parsed.push({ + reference, + parameters, + }); + } + return parsed; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/options.js + + + + +// DO NOT use destructuring for `https.request` and `http.request` as it's not compatible with `nock`. + + + + + + + + +const [major, minor] = external_node_process_namespaceObject.versions.node.split('.').map(Number); +function validateSearchParameters(searchParameters) { + // eslint-disable-next-line guard-for-in + for (const key in searchParameters) { + const value = searchParameters[key]; + assert.any([dist.string, dist.number, dist.boolean, dist.null_, dist.undefined], value); + } +} +const globalCache = new Map(); +let globalDnsCache; +const getGlobalDnsCache = () => { + if (globalDnsCache) { + return globalDnsCache; + } + globalDnsCache = new cacheable_lookup_source(); + return globalDnsCache; +}; +const defaultInternals = { + request: undefined, + agent: { + http: undefined, + https: undefined, + http2: undefined, + }, + h2session: undefined, + decompress: true, + timeout: { + connect: undefined, + lookup: undefined, + read: undefined, + request: undefined, + response: undefined, + secureConnect: undefined, + send: undefined, + socket: undefined, + }, + prefixUrl: '', + body: undefined, + form: undefined, + json: undefined, + cookieJar: undefined, + ignoreInvalidCookies: false, + searchParams: undefined, + dnsLookup: undefined, + dnsCache: undefined, + context: {}, + hooks: { + init: [], + beforeRequest: [], + beforeError: [], + beforeRedirect: [], + beforeRetry: [], + afterResponse: [], + }, + followRedirect: true, + maxRedirects: 10, + cache: undefined, + throwHttpErrors: true, + username: '', + password: '', + http2: false, + allowGetBody: false, + headers: { + 'user-agent': 'got (https://github.com/sindresorhus/got)', + }, + methodRewriting: false, + dnsLookupIpVersion: undefined, + parseJson: JSON.parse, + stringifyJson: JSON.stringify, + retry: { + limit: 2, + methods: [ + 'GET', + 'PUT', + 'HEAD', + 'DELETE', + 'OPTIONS', + 'TRACE', + ], + statusCodes: [ + 408, + 413, + 429, + 500, + 502, + 503, + 504, + 521, + 522, + 524, + ], + errorCodes: [ + 'ETIMEDOUT', + 'ECONNRESET', + 'EADDRINUSE', + 'ECONNREFUSED', + 'EPIPE', + 'ENOTFOUND', + 'ENETUNREACH', + 'EAI_AGAIN', + ], + maxRetryAfter: undefined, + calculateDelay: ({ computedValue }) => computedValue, + backoffLimit: Number.POSITIVE_INFINITY, + noise: 100, + }, + localAddress: undefined, + method: 'GET', + createConnection: undefined, + cacheOptions: { + shared: undefined, + cacheHeuristic: undefined, + immutableMinTimeToLive: undefined, + ignoreCargoCult: undefined, + }, + https: { + alpnProtocols: undefined, + rejectUnauthorized: undefined, + checkServerIdentity: undefined, + certificateAuthority: undefined, + key: undefined, + certificate: undefined, + passphrase: undefined, + 
pfx: undefined, + ciphers: undefined, + honorCipherOrder: undefined, + minVersion: undefined, + maxVersion: undefined, + signatureAlgorithms: undefined, + tlsSessionLifetime: undefined, + dhparam: undefined, + ecdhCurve: undefined, + certificateRevocationLists: undefined, + }, + encoding: undefined, + resolveBodyOnly: false, + isStream: false, + responseType: 'text', + url: undefined, + pagination: { + transform(response) { + if (response.request.options.responseType === 'json') { + return response.body; + } + return JSON.parse(response.body); + }, + paginate({ response }) { + const rawLinkHeader = response.headers.link; + if (typeof rawLinkHeader !== 'string' || rawLinkHeader.trim() === '') { + return false; + } + const parsed = parseLinkHeader(rawLinkHeader); + const next = parsed.find(entry => entry.parameters.rel === 'next' || entry.parameters.rel === '"next"'); + if (next) { + return { + url: new external_node_url_namespaceObject.URL(next.reference, response.url), + }; + } + return false; + }, + filter: () => true, + shouldContinue: () => true, + countLimit: Number.POSITIVE_INFINITY, + backoff: 0, + requestLimit: 10000, + stackAllItems: false, + }, + setHost: true, + maxHeaderSize: undefined, + signal: undefined, + enableUnixSockets: true, +}; +const cloneInternals = (internals) => { + const { hooks, retry } = internals; + const result = { + ...internals, + context: { ...internals.context }, + cacheOptions: { ...internals.cacheOptions }, + https: { ...internals.https }, + agent: { ...internals.agent }, + headers: { ...internals.headers }, + retry: { + ...retry, + errorCodes: [...retry.errorCodes], + methods: [...retry.methods], + statusCodes: [...retry.statusCodes], + }, + timeout: { ...internals.timeout }, + hooks: { + init: [...hooks.init], + beforeRequest: [...hooks.beforeRequest], + beforeError: [...hooks.beforeError], + beforeRedirect: [...hooks.beforeRedirect], + beforeRetry: [...hooks.beforeRetry], + afterResponse: [...hooks.afterResponse], + }, + searchParams: internals.searchParams ? 
new external_node_url_namespaceObject.URLSearchParams(internals.searchParams) : undefined, + pagination: { ...internals.pagination }, + }; + if (result.url !== undefined) { + result.prefixUrl = ''; + } + return result; +}; +const cloneRaw = (raw) => { + const { hooks, retry } = raw; + const result = { ...raw }; + if (dist.object(raw.context)) { + result.context = { ...raw.context }; + } + if (dist.object(raw.cacheOptions)) { + result.cacheOptions = { ...raw.cacheOptions }; + } + if (dist.object(raw.https)) { + result.https = { ...raw.https }; + } + if (dist.object(raw.cacheOptions)) { + result.cacheOptions = { ...result.cacheOptions }; + } + if (dist.object(raw.agent)) { + result.agent = { ...raw.agent }; + } + if (dist.object(raw.headers)) { + result.headers = { ...raw.headers }; + } + if (dist.object(retry)) { + result.retry = { ...retry }; + if (dist.array(retry.errorCodes)) { + result.retry.errorCodes = [...retry.errorCodes]; + } + if (dist.array(retry.methods)) { + result.retry.methods = [...retry.methods]; + } + if (dist.array(retry.statusCodes)) { + result.retry.statusCodes = [...retry.statusCodes]; + } + } + if (dist.object(raw.timeout)) { + result.timeout = { ...raw.timeout }; + } + if (dist.object(hooks)) { + result.hooks = { + ...hooks, + }; + if (dist.array(hooks.init)) { + result.hooks.init = [...hooks.init]; + } + if (dist.array(hooks.beforeRequest)) { + result.hooks.beforeRequest = [...hooks.beforeRequest]; + } + if (dist.array(hooks.beforeError)) { + result.hooks.beforeError = [...hooks.beforeError]; + } + if (dist.array(hooks.beforeRedirect)) { + result.hooks.beforeRedirect = [...hooks.beforeRedirect]; + } + if (dist.array(hooks.beforeRetry)) { + result.hooks.beforeRetry = [...hooks.beforeRetry]; + } + if (dist.array(hooks.afterResponse)) { + result.hooks.afterResponse = [...hooks.afterResponse]; + } + } + // TODO: raw.searchParams + if (dist.object(raw.pagination)) { + result.pagination = { ...raw.pagination }; + } + return result; +}; +const getHttp2TimeoutOption = (internals) => { + const delays = [internals.timeout.socket, internals.timeout.connect, internals.timeout.lookup, internals.timeout.request, internals.timeout.secureConnect].filter(delay => typeof delay === 'number'); + if (delays.length > 0) { + return Math.min(...delays); + } + return undefined; +}; +const init = (options, withOptions, self) => { + const initHooks = options.hooks?.init; + if (initHooks) { + for (const hook of initHooks) { + hook(withOptions, self); + } + } +}; +class Options { + constructor(input, options, defaults) { + Object.defineProperty(this, "_unixOptions", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_internals", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_merging", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_init", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + assert.any([dist.string, dist.urlInstance, dist.object, dist.undefined], input); + assert.any([dist.object, dist.undefined], options); + assert.any([dist.object, dist.undefined], defaults); + if (input instanceof Options || options instanceof Options) { + throw new TypeError('The defaults must be passed as the third argument'); + } + this._internals = cloneInternals(defaults?._internals ?? defaults ?? defaultInternals); + this._init = [...(defaults?._init ?? 
[])]; + this._merging = false; + this._unixOptions = undefined; + // This rule allows `finally` to be considered more important. + // Meaning no matter the error thrown in the `try` block, + // if `finally` throws then the `finally` error will be thrown. + // + // Yes, we want this. If we set `url` first, then the `url.searchParams` + // would get merged. Instead we set the `searchParams` first, then + // `url.searchParams` is overwritten as expected. + // + /* eslint-disable no-unsafe-finally */ + try { + if (dist.plainObject(input)) { + try { + this.merge(input); + this.merge(options); + } + finally { + this.url = input.url; + } + } + else { + try { + this.merge(options); + } + finally { + if (options?.url !== undefined) { + if (input === undefined) { + this.url = options.url; + } + else { + throw new TypeError('The `url` option is mutually exclusive with the `input` argument'); + } + } + else if (input !== undefined) { + this.url = input; + } + } + } + } + catch (error) { + error.options = this; + throw error; + } + /* eslint-enable no-unsafe-finally */ + } + merge(options) { + if (!options) { + return; + } + if (options instanceof Options) { + for (const init of options._init) { + this.merge(init); + } + return; + } + options = cloneRaw(options); + init(this, options, this); + init(options, options, this); + this._merging = true; + // Always merge `isStream` first + if ('isStream' in options) { + this.isStream = options.isStream; + } + try { + let push = false; + for (const key in options) { + // `got.extend()` options + if (key === 'mutableDefaults' || key === 'handlers') { + continue; + } + // Never merge `url` + if (key === 'url') { + continue; + } + if (!(key in this)) { + throw new Error(`Unexpected option: ${key}`); + } + // @ts-expect-error Type 'unknown' is not assignable to type 'never'. + this[key] = options[key]; + push = true; + } + if (push) { + this._init.push(options); + } + } + finally { + this._merging = false; + } + } + /** + Custom request function. + The main purpose of this is to [support HTTP2 using a wrapper](https://github.com/szmarczak/http2-wrapper). + + @default http.request | https.request + */ + get request() { + return this._internals.request; + } + set request(value) { + assert.any([dist.function_, dist.undefined], value); + this._internals.request = value; + } + /** + An object representing `http`, `https` and `http2` keys for [`http.Agent`](https://nodejs.org/api/http.html#http_class_http_agent), [`https.Agent`](https://nodejs.org/api/https.html#https_class_https_agent) and [`http2wrapper.Agent`](https://github.com/szmarczak/http2-wrapper#new-http2agentoptions) instance. + This is necessary because a request to one protocol might redirect to another. + In such a scenario, Got will switch over to the right protocol agent for you. + + If a key is not present, it will default to a global agent. 
+ + @example + ``` + import got from 'got'; + import HttpAgent from 'agentkeepalive'; + + const {HttpsAgent} = HttpAgent; + + await got('https://sindresorhus.com', { + agent: { + http: new HttpAgent(), + https: new HttpsAgent() + } + }); + ``` + */ + get agent() { + return this._internals.agent; + } + set agent(value) { + assert.plainObject(value); + // eslint-disable-next-line guard-for-in + for (const key in value) { + if (!(key in this._internals.agent)) { + throw new TypeError(`Unexpected agent option: ${key}`); + } + assert.any([dist.object, dist.undefined], value[key]); + } + if (this._merging) { + Object.assign(this._internals.agent, value); + } + else { + this._internals.agent = { ...value }; + } + } + get h2session() { + return this._internals.h2session; + } + set h2session(value) { + this._internals.h2session = value; + } + /** + Decompress the response automatically. + + This will set the `accept-encoding` header to `gzip, deflate, br` unless you set it yourself. + + If this is disabled, a compressed response is returned as a `Buffer`. + This may be useful if you want to handle decompression yourself or stream the raw compressed data. + + @default true + */ + get decompress() { + return this._internals.decompress; + } + set decompress(value) { + assert.boolean(value); + this._internals.decompress = value; + } + /** + Milliseconds to wait for the server to end the response before aborting the request with `got.TimeoutError` error (a.k.a. `request` property). + By default, there's no timeout. + + This also accepts an `object` with the following fields to constrain the duration of each phase of the request lifecycle: + + - `lookup` starts when a socket is assigned and ends when the hostname has been resolved. + Does not apply when using a Unix domain socket. + - `connect` starts when `lookup` completes (or when the socket is assigned if lookup does not apply to the request) and ends when the socket is connected. + - `secureConnect` starts when `connect` completes and ends when the handshaking process completes (HTTPS only). + - `socket` starts when the socket is connected. See [request.setTimeout](https://nodejs.org/api/http.html#http_request_settimeout_timeout_callback). + - `response` starts when the request has been written to the socket and ends when the response headers are received. + - `send` starts when the socket is connected and ends with the request has been written to the socket. + - `request` starts when the request is initiated and ends when the response's end event fires. + */ + get timeout() { + // We always return `Delays` here. + // It has to be `Delays | number`, otherwise TypeScript will error because the getter and the setter have incompatible types. + return this._internals.timeout; + } + set timeout(value) { + assert.plainObject(value); + // eslint-disable-next-line guard-for-in + for (const key in value) { + if (!(key in this._internals.timeout)) { + throw new Error(`Unexpected timeout option: ${key}`); + } + assert.any([dist.number, dist.undefined], value[key]); + } + if (this._merging) { + Object.assign(this._internals.timeout, value); + } + else { + this._internals.timeout = { ...value }; + } + } + /** + When specified, `prefixUrl` will be prepended to `url`. + The prefix can be any valid URL, either relative or absolute. + A trailing slash `/` is optional - one will be added automatically. + + __Note__: `prefixUrl` will be ignored if the `url` argument is a URL instance. 
+ + __Note__: Leading slashes in `input` are disallowed when using this option to enforce consistency and avoid confusion. + For example, when the prefix URL is `https://example.com/foo` and the input is `/bar`, there's ambiguity whether the resulting URL would become `https://example.com/foo/bar` or `https://example.com/bar`. + The latter is used by browsers. + + __Tip__: Useful when used with `got.extend()` to create niche-specific Got instances. + + __Tip__: You can change `prefixUrl` using hooks as long as the URL still includes the `prefixUrl`. + If the URL doesn't include it anymore, it will throw. + + @example + ``` + import got from 'got'; + + await got('unicorn', {prefixUrl: 'https://cats.com'}); + //=> 'https://cats.com/unicorn' + + const instance = got.extend({ + prefixUrl: 'https://google.com' + }); + + await instance('unicorn', { + hooks: { + beforeRequest: [ + options => { + options.prefixUrl = 'https://cats.com'; + } + ] + } + }); + //=> 'https://cats.com/unicorn' + ``` + */ + get prefixUrl() { + // We always return `string` here. + // It has to be `string | URL`, otherwise TypeScript will error because the getter and the setter have incompatible types. + return this._internals.prefixUrl; + } + set prefixUrl(value) { + assert.any([dist.string, dist.urlInstance], value); + if (value === '') { + this._internals.prefixUrl = ''; + return; + } + value = value.toString(); + if (!value.endsWith('/')) { + value += '/'; + } + if (this._internals.prefixUrl && this._internals.url) { + const { href } = this._internals.url; + this._internals.url.href = value + href.slice(this._internals.prefixUrl.length); + } + this._internals.prefixUrl = value; + } + /** + __Note #1__: The `body` option cannot be used with the `json` or `form` option. + + __Note #2__: If you provide this option, `got.stream()` will be read-only. + + __Note #3__: If you provide a payload with the `GET` or `HEAD` method, it will throw a `TypeError` unless the method is `GET` and the `allowGetBody` option is set to `true`. + + __Note #4__: This option is not enumerable and will not be merged with the instance defaults. + + The `content-length` header will be automatically set if `body` is a `string` / `Buffer` / [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) / [`form-data` instance](https://github.com/form-data/form-data), and `content-length` and `transfer-encoding` are not manually set in `options.headers`. + + Since Got 12, the `content-length` is not automatically set when `body` is a `fs.createReadStream`. + */ + get body() { + return this._internals.body; + } + set body(value) { + assert.any([dist.string, dist.buffer, dist.nodeStream, dist.generator, dist.asyncGenerator, isFormData, dist.undefined], value); + if (dist.nodeStream(value)) { + assert.truthy(value.readable); + } + if (value !== undefined) { + assert.undefined(this._internals.form); + assert.undefined(this._internals.json); + } + this._internals.body = value; + } + /** + The form body is converted to a query string using [`(new URLSearchParams(object)).toString()`](https://nodejs.org/api/url.html#url_constructor_new_urlsearchparams_obj). + + If the `Content-Type` header is not present, it will be set to `application/x-www-form-urlencoded`. + + __Note #1__: If you provide this option, `got.stream()` will be read-only. + + __Note #2__: This option is not enumerable and will not be merged with the instance defaults. 
+ */ + get form() { + return this._internals.form; + } + set form(value) { + assert.any([dist.plainObject, dist.undefined], value); + if (value !== undefined) { + assert.undefined(this._internals.body); + assert.undefined(this._internals.json); + } + this._internals.form = value; + } + /** + JSON body. If the `Content-Type` header is not set, it will be set to `application/json`. + + __Note #1__: If you provide this option, `got.stream()` will be read-only. + + __Note #2__: This option is not enumerable and will not be merged with the instance defaults. + */ + get json() { + return this._internals.json; + } + set json(value) { + if (value !== undefined) { + assert.undefined(this._internals.body); + assert.undefined(this._internals.form); + } + this._internals.json = value; + } + /** + The URL to request, as a string, a [`https.request` options object](https://nodejs.org/api/https.html#https_https_request_options_callback), or a [WHATWG `URL`](https://nodejs.org/api/url.html#url_class_url). + + Properties from `options` will override properties in the parsed `url`. + + If no protocol is specified, it will throw a `TypeError`. + + __Note__: The query string is **not** parsed as search params. + + @example + ``` + await got('https://example.com/?query=a b'); //=> https://example.com/?query=a%20b + await got('https://example.com/', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b + + // The query string is overridden by `searchParams` + await got('https://example.com/?query=a b', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b + ``` + */ + get url() { + return this._internals.url; + } + set url(value) { + assert.any([dist.string, dist.urlInstance, dist.undefined], value); + if (value === undefined) { + this._internals.url = undefined; + return; + } + if (dist.string(value) && value.startsWith('/')) { + throw new Error('`url` must not start with a slash'); + } + const urlString = `${this.prefixUrl}${value.toString()}`; + const url = new external_node_url_namespaceObject.URL(urlString); + this._internals.url = url; + decodeURI(urlString); + if (url.protocol === 'unix:') { + url.href = `http://unix${url.pathname}${url.search}`; + } + if (url.protocol !== 'http:' && url.protocol !== 'https:') { + const error = new Error(`Unsupported protocol: ${url.protocol}`); + error.code = 'ERR_UNSUPPORTED_PROTOCOL'; + throw error; + } + if (this._internals.username) { + url.username = this._internals.username; + this._internals.username = ''; + } + if (this._internals.password) { + url.password = this._internals.password; + this._internals.password = ''; + } + if (this._internals.searchParams) { + url.search = this._internals.searchParams.toString(); + this._internals.searchParams = undefined; + } + if (url.hostname === 'unix') { + if (!this._internals.enableUnixSockets) { + throw new Error('Using UNIX domain sockets but option `enableUnixSockets` is not enabled'); + } + const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`); + if (matches?.groups) { + const { socketPath, path } = matches.groups; + this._unixOptions = { + socketPath, + path, + host: '', + }; + } + else { + this._unixOptions = undefined; + } + return; + } + this._unixOptions = undefined; + } + /** + Cookie support. You don't have to care about parsing or how to store them. + + __Note__: If you provide this option, `options.headers.cookie` will be overridden.
+ */ + get cookieJar() { + return this._internals.cookieJar; + } + set cookieJar(value) { + assert.any([dist.object, dist.undefined], value); + if (value === undefined) { + this._internals.cookieJar = undefined; + return; + } + let { setCookie, getCookieString } = value; + assert.function_(setCookie); + assert.function_(getCookieString); + /* istanbul ignore next: Horrible `tough-cookie` v3 check */ + if (setCookie.length === 4 && getCookieString.length === 0) { + setCookie = (0,external_node_util_namespaceObject.promisify)(setCookie.bind(value)); + getCookieString = (0,external_node_util_namespaceObject.promisify)(getCookieString.bind(value)); + this._internals.cookieJar = { + setCookie, + getCookieString: getCookieString, + }; + } + else { + this._internals.cookieJar = value; + } + } + /** + You can abort the `request` using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController). + + *Requires Node.js 16 or later.* + + @example + ``` + import got from 'got'; + + const abortController = new AbortController(); + + const request = got('https://httpbin.org/anything', { + signal: abortController.signal + }); + + setTimeout(() => { + abortController.abort(); + }, 100); + ``` + */ + // TODO: Replace `any` with `AbortSignal` when targeting Node 16. + get signal() { + return this._internals.signal; + } + // TODO: Replace `any` with `AbortSignal` when targeting Node 16. + set signal(value) { + assert.object(value); + this._internals.signal = value; + } + /** + Ignore invalid cookies instead of throwing an error. + Only useful when the `cookieJar` option has been set. Not recommended. + + @default false + */ + get ignoreInvalidCookies() { + return this._internals.ignoreInvalidCookies; + } + set ignoreInvalidCookies(value) { + assert.boolean(value); + this._internals.ignoreInvalidCookies = value; + } + /** + Query string that will be added to the request URL. + This will override the query string in `url`. + + If you need to pass in an array, you can do it using a `URLSearchParams` instance. 
+ + @example + ``` + import got from 'got'; + + const searchParams = new URLSearchParams([['key', 'a'], ['key', 'b']]); + + await got('https://example.com', {searchParams}); + + console.log(searchParams.toString()); + //=> 'key=a&key=b' + ``` + */ + get searchParams() { + if (this._internals.url) { + return this._internals.url.searchParams; + } + if (this._internals.searchParams === undefined) { + this._internals.searchParams = new external_node_url_namespaceObject.URLSearchParams(); + } + return this._internals.searchParams; + } + set searchParams(value) { + assert.any([dist.string, dist.object, dist.undefined], value); + const url = this._internals.url; + if (value === undefined) { + this._internals.searchParams = undefined; + if (url) { + url.search = ''; + } + return; + } + const searchParameters = this.searchParams; + let updated; + if (dist.string(value)) { + updated = new external_node_url_namespaceObject.URLSearchParams(value); + } + else if (value instanceof external_node_url_namespaceObject.URLSearchParams) { + updated = value; + } + else { + validateSearchParameters(value); + updated = new external_node_url_namespaceObject.URLSearchParams(); + // eslint-disable-next-line guard-for-in + for (const key in value) { + const entry = value[key]; + if (entry === null) { + updated.append(key, ''); + } + else if (entry === undefined) { + searchParameters.delete(key); + } + else { + updated.append(key, entry); + } + } + } + if (this._merging) { + // These keys will be replaced + for (const key of updated.keys()) { + searchParameters.delete(key); + } + for (const [key, value] of updated) { + searchParameters.append(key, value); + } + } + else if (url) { + url.search = searchParameters.toString(); + } + else { + this._internals.searchParams = searchParameters; + } + } + get searchParameters() { + throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.'); + } + set searchParameters(_value) { + throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.'); + } + get dnsLookup() { + return this._internals.dnsLookup; + } + set dnsLookup(value) { + assert.any([dist.function_, dist.undefined], value); + this._internals.dnsLookup = value; + } + /** + An instance of [`CacheableLookup`](https://github.com/szmarczak/cacheable-lookup) used for making DNS lookups. + Useful when making lots of requests to different *public* hostnames. + + `CacheableLookup` uses `dns.resolver4(..)` and `dns.resolver6(...)` under the hood and fall backs to `dns.lookup(...)` when the first two fail, which may lead to additional delay. + + __Note__: This should stay disabled when making requests to internal hostnames such as `localhost`, `database.local` etc. + + @default false + */ + get dnsCache() { + return this._internals.dnsCache; + } + set dnsCache(value) { + assert.any([dist.object, dist.boolean, dist.undefined], value); + if (value === true) { + this._internals.dnsCache = getGlobalDnsCache(); + } + else if (value === false) { + this._internals.dnsCache = undefined; + } + else { + this._internals.dnsCache = value; + } + } + /** + User data. `context` is shallow merged and enumerable. If it contains non-enumerable properties they will NOT be merged. 
+ + @example + ``` + import got from 'got'; + + const instance = got.extend({ + hooks: { + beforeRequest: [ + options => { + if (!options.context || !options.context.token) { + throw new Error('Token required'); + } + + options.headers.token = options.context.token; + } + ] + } + }); + + const context = { + token: 'secret' + }; + + const response = await instance('https://httpbin.org/headers', {context}); + + // Let's see the headers + console.log(response.body); + ``` + */ + get context() { + return this._internals.context; + } + set context(value) { + assert.object(value); + if (this._merging) { + Object.assign(this._internals.context, value); + } + else { + this._internals.context = { ...value }; + } + } + /** + Hooks allow modifications during the request lifecycle. + Hook functions may be async and are run serially. + */ + get hooks() { + return this._internals.hooks; + } + set hooks(value) { + assert.object(value); + // eslint-disable-next-line guard-for-in + for (const knownHookEvent in value) { + if (!(knownHookEvent in this._internals.hooks)) { + throw new Error(`Unexpected hook event: ${knownHookEvent}`); + } + const typedKnownHookEvent = knownHookEvent; + const typedValue = value; + const hooks = typedValue[typedKnownHookEvent]; + assert.any([dist.array, dist.undefined], hooks); + if (hooks) { + for (const hook of hooks) { + assert.function_(hook); + } + } + if (this._merging) { + if (hooks) { + // @ts-expect-error FIXME + this._internals.hooks[typedKnownHookEvent].push(...hooks); + } + } + else { + if (!hooks) { + throw new Error(`Missing hook event: ${knownHookEvent}`); + } + // @ts-expect-error FIXME + this._internals.hooks[knownHookEvent] = [...hooks]; + } + } + } + /** + Defines if redirect responses should be followed automatically. + + Note that if a `303` is sent by the server in response to any request type (`POST`, `DELETE`, etc.), Got will automatically request the resource pointed to in the location header via `GET`. + This is in accordance with [the spec](https://tools.ietf.org/html/rfc7231#section-6.4.4). + + @default true + */ + get followRedirect() { + return this._internals.followRedirect; + } + set followRedirect(value) { + assert.boolean(value); + this._internals.followRedirect = value; + } + get followRedirects() { + throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.'); + } + set followRedirects(_value) { + throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.'); + } + /** + If exceeded, the request will be aborted and a `MaxRedirectsError` will be thrown. + + @default 10 + */ + get maxRedirects() { + return this._internals.maxRedirects; + } + set maxRedirects(value) { + assert.number(value); + this._internals.maxRedirects = value; + } + /** + A cache adapter instance for storing cached response data. + + @default false + */ + get cache() { + return this._internals.cache; + } + set cache(value) { + assert.any([dist.object, dist.string, dist.boolean, dist.undefined], value); + if (value === true) { + this._internals.cache = globalCache; + } + else if (value === false) { + this._internals.cache = undefined; + } + else { + this._internals.cache = value; + } + } + /** + Determines if a `got.HTTPError` is thrown for unsuccessful responses. + + If this is disabled, requests that encounter an error status code will be resolved with the `response` instead of throwing. + This may be useful if you are checking for resource availability and are expecting error responses. 
+ + @default true + */ + get throwHttpErrors() { + return this._internals.throwHttpErrors; + } + set throwHttpErrors(value) { + assert.boolean(value); + this._internals.throwHttpErrors = value; + } + get username() { + const url = this._internals.url; + const value = url ? url.username : this._internals.username; + return decodeURIComponent(value); + } + set username(value) { + assert.string(value); + const url = this._internals.url; + const fixedValue = encodeURIComponent(value); + if (url) { + url.username = fixedValue; + } + else { + this._internals.username = fixedValue; + } + } + get password() { + const url = this._internals.url; + const value = url ? url.password : this._internals.password; + return decodeURIComponent(value); + } + set password(value) { + assert.string(value); + const url = this._internals.url; + const fixedValue = encodeURIComponent(value); + if (url) { + url.password = fixedValue; + } + else { + this._internals.password = fixedValue; + } + } + /** + If set to `true`, Got will additionally accept HTTP2 requests. + + It will choose either HTTP/1.1 or HTTP/2 depending on the ALPN protocol. + + __Note__: This option requires Node.js 15.10.0 or newer as HTTP/2 support on older Node.js versions is very buggy. + + __Note__: Overriding `options.request` will disable HTTP2 support. + + @default false + + @example + ``` + import got from 'got'; + + const {headers} = await got('https://nghttp2.org/httpbin/anything', {http2: true}); + + console.log(headers.via); + //=> '2 nghttpx' + ``` + */ + get http2() { + return this._internals.http2; + } + set http2(value) { + assert.boolean(value); + this._internals.http2 = value; + } + /** + Set this to `true` to allow sending body for the `GET` method. + However, the [HTTP/2 specification](https://tools.ietf.org/html/rfc7540#section-8.1.3) says that `An HTTP GET request includes request header fields and no payload body`, therefore when using the HTTP/2 protocol this option will have no effect. + This option is only meant to interact with non-compliant servers when you have no other choice. + + __Note__: The [RFC 7321](https://tools.ietf.org/html/rfc7231#section-4.3.1) doesn't specify any particular behavior for the GET method having a payload, therefore __it's considered an [anti-pattern](https://en.wikipedia.org/wiki/Anti-pattern)__. + + @default false + */ + get allowGetBody() { + return this._internals.allowGetBody; + } + set allowGetBody(value) { + assert.boolean(value); + this._internals.allowGetBody = value; + } + /** + Request headers. + + Existing headers will be overwritten. Headers set to `undefined` will be omitted. + + @default {} + */ + get headers() { + return this._internals.headers; + } + set headers(value) { + assert.plainObject(value); + if (this._merging) { + Object.assign(this._internals.headers, lowercaseKeys(value)); + } + else { + this._internals.headers = lowercaseKeys(value); + } + } + /** + Specifies if the redirects should be [rewritten as `GET`](https://tools.ietf.org/html/rfc7231#section-6.4). + + If `false`, when sending a POST request and receiving a `302`, it will resend the body to the new location using the same HTTP method (`POST` in this case). + + @default false + */ + get methodRewriting() { + return this._internals.methodRewriting; + } + set methodRewriting(value) { + assert.boolean(value); + this._internals.methodRewriting = value; + } + /** + Indicates which DNS record family to use. 
+ + Values: + - `undefined`: IPv4 (if present) or IPv6 + - `4`: Only IPv4 + - `6`: Only IPv6 + + @default undefined + */ + get dnsLookupIpVersion() { + return this._internals.dnsLookupIpVersion; + } + set dnsLookupIpVersion(value) { + if (value !== undefined && value !== 4 && value !== 6) { + throw new TypeError(`Invalid DNS lookup IP version: ${value}`); + } + this._internals.dnsLookupIpVersion = value; + } + /** + A function used to parse JSON responses. + + @example + ``` + import got from 'got'; + import Bourne from '@hapi/bourne'; + + const parsed = await got('https://example.com', { + parseJson: text => Bourne.parse(text) + }).json(); + + console.log(parsed); + ``` + */ + get parseJson() { + return this._internals.parseJson; + } + set parseJson(value) { + assert.function_(value); + this._internals.parseJson = value; + } + /** + A function used to stringify the body of JSON requests. + + @example + ``` + import got from 'got'; + + await got.post('https://example.com', { + stringifyJson: object => JSON.stringify(object, (key, value) => { + if (key.startsWith('_')) { + return; + } + + return value; + }), + json: { + some: 'payload', + _ignoreMe: 1234 + } + }); + ``` + + @example + ``` + import got from 'got'; + + await got.post('https://example.com', { + stringifyJson: object => JSON.stringify(object, (key, value) => { + if (typeof value === 'number') { + return value.toString(); + } + + return value; + }), + json: { + some: 'payload', + number: 1 + } + }); + ``` + */ + get stringifyJson() { + return this._internals.stringifyJson; + } + set stringifyJson(value) { + assert.function_(value); + this._internals.stringifyJson = value; + } + /** + An object representing `limit`, `calculateDelay`, `methods`, `statusCodes`, `maxRetryAfter` and `errorCodes` fields for maximum retry count, retry handler, allowed methods, allowed status codes, maximum [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) time and allowed error codes. + + Delays between retries counts with function `1000 * Math.pow(2, retry) + Math.random() * 100`, where `retry` is attempt number (starts from 1). + + The `calculateDelay` property is a `function` that receives an object with `attemptCount`, `retryOptions`, `error` and `computedValue` properties for current retry count, the retry options, error and default computed value. + The function must return a delay in milliseconds (or a Promise resolving with it) (`0` return value cancels retry). + + By default, it retries *only* on the specified methods, status codes, and on these network errors: + + - `ETIMEDOUT`: One of the [timeout](#timeout) limits were reached. + - `ECONNRESET`: Connection was forcibly closed by a peer. + - `EADDRINUSE`: Could not bind to any free port. + - `ECONNREFUSED`: Connection was refused by the server. + - `EPIPE`: The remote side of the stream being written has been closed. + - `ENOTFOUND`: Couldn't resolve the hostname to an IP address. + - `ENETUNREACH`: No internet connection. + - `EAI_AGAIN`: DNS lookup timed out. + + __Note__: If `maxRetryAfter` is set to `undefined`, it will use `options.timeout`. + __Note__: If [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) header is greater than `maxRetryAfter`, it will cancel the request. 
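+
+    A minimal sketch of tightening the retry behavior (the values below are illustrative, not the defaults):
+
+    @example
+    ```
+    import got from 'got';
+
+    // Retry GET requests at most twice, and only on these status codes.
+    const response = await got('https://example.com', {
+        retry: {
+            limit: 2,
+            methods: ['GET'],
+            statusCodes: [408, 429, 500, 502, 503, 504]
+        }
+    });
+    ```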
+ */ + get retry() { + return this._internals.retry; + } + set retry(value) { + assert.plainObject(value); + assert.any([dist.function_, dist.undefined], value.calculateDelay); + assert.any([dist.number, dist.undefined], value.maxRetryAfter); + assert.any([dist.number, dist.undefined], value.limit); + assert.any([dist.array, dist.undefined], value.methods); + assert.any([dist.array, dist.undefined], value.statusCodes); + assert.any([dist.array, dist.undefined], value.errorCodes); + assert.any([dist.number, dist.undefined], value.noise); + if (value.noise && Math.abs(value.noise) > 100) { + throw new Error(`The maximum acceptable retry noise is +/- 100ms, got ${value.noise}`); + } + for (const key in value) { + if (!(key in this._internals.retry)) { + throw new Error(`Unexpected retry option: ${key}`); + } + } + if (this._merging) { + Object.assign(this._internals.retry, value); + } + else { + this._internals.retry = { ...value }; + } + const { retry } = this._internals; + retry.methods = [...new Set(retry.methods.map(method => method.toUpperCase()))]; + retry.statusCodes = [...new Set(retry.statusCodes)]; + retry.errorCodes = [...new Set(retry.errorCodes)]; + } + /** + From `http.RequestOptions`. + + The IP address used to send the request from. + */ + get localAddress() { + return this._internals.localAddress; + } + set localAddress(value) { + assert.any([dist.string, dist.undefined], value); + this._internals.localAddress = value; + } + /** + The HTTP method used to make the request. + + @default 'GET' + */ + get method() { + return this._internals.method; + } + set method(value) { + assert.string(value); + this._internals.method = value.toUpperCase(); + } + get createConnection() { + return this._internals.createConnection; + } + set createConnection(value) { + assert.any([dist.function_, dist.undefined], value); + this._internals.createConnection = value; + } + /** + From `http-cache-semantics` + + @default {} + */ + get cacheOptions() { + return this._internals.cacheOptions; + } + set cacheOptions(value) { + assert.plainObject(value); + assert.any([dist.boolean, dist.undefined], value.shared); + assert.any([dist.number, dist.undefined], value.cacheHeuristic); + assert.any([dist.number, dist.undefined], value.immutableMinTimeToLive); + assert.any([dist.boolean, dist.undefined], value.ignoreCargoCult); + for (const key in value) { + if (!(key in this._internals.cacheOptions)) { + throw new Error(`Cache option \`${key}\` does not exist`); + } + } + if (this._merging) { + Object.assign(this._internals.cacheOptions, value); + } + else { + this._internals.cacheOptions = { ...value }; + } + } + /** + Options for the advanced HTTPS API. 
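+
+    A minimal sketch of accepting a self-signed certificate during local testing (the host and port are illustrative; avoid this in production):
+
+    @example
+    ```
+    import got from 'got';
+
+    const response = await got('https://localhost:8443', {
+        https: {
+            rejectUnauthorized: false
+        }
+    });
+    ```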
+ */ + get https() { + return this._internals.https; + } + set https(value) { + assert.plainObject(value); + assert.any([dist.boolean, dist.undefined], value.rejectUnauthorized); + assert.any([dist.function_, dist.undefined], value.checkServerIdentity); + assert.any([dist.string, dist.object, dist.array, dist.undefined], value.certificateAuthority); + assert.any([dist.string, dist.object, dist.array, dist.undefined], value.key); + assert.any([dist.string, dist.object, dist.array, dist.undefined], value.certificate); + assert.any([dist.string, dist.undefined], value.passphrase); + assert.any([dist.string, dist.buffer, dist.array, dist.undefined], value.pfx); + assert.any([dist.array, dist.undefined], value.alpnProtocols); + assert.any([dist.string, dist.undefined], value.ciphers); + assert.any([dist.string, dist.buffer, dist.undefined], value.dhparam); + assert.any([dist.string, dist.undefined], value.signatureAlgorithms); + assert.any([dist.string, dist.undefined], value.minVersion); + assert.any([dist.string, dist.undefined], value.maxVersion); + assert.any([dist.boolean, dist.undefined], value.honorCipherOrder); + assert.any([dist.number, dist.undefined], value.tlsSessionLifetime); + assert.any([dist.string, dist.undefined], value.ecdhCurve); + assert.any([dist.string, dist.buffer, dist.array, dist.undefined], value.certificateRevocationLists); + for (const key in value) { + if (!(key in this._internals.https)) { + throw new Error(`HTTPS option \`${key}\` does not exist`); + } + } + if (this._merging) { + Object.assign(this._internals.https, value); + } + else { + this._internals.https = { ...value }; + } + } + /** + [Encoding](https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings) to be used on `setEncoding` of the response data. + + To get a [`Buffer`](https://nodejs.org/api/buffer.html), you need to set `responseType` to `buffer` instead. + Don't set this option to `null`. + + __Note__: This doesn't affect streams! Instead, you need to do `got.stream(...).setEncoding(encoding)`. + + @default 'utf-8' + */ + get encoding() { + return this._internals.encoding; + } + set encoding(value) { + if (value === null) { + throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead'); + } + assert.any([dist.string, dist.undefined], value); + this._internals.encoding = value; + } + /** + When set to `true` the promise will return the Response body instead of the Response object. + + @default false + */ + get resolveBodyOnly() { + return this._internals.resolveBodyOnly; + } + set resolveBodyOnly(value) { + assert.boolean(value); + this._internals.resolveBodyOnly = value; + } + /** + Returns a `Stream` instead of a `Promise`. + This is equivalent to calling `got.stream(url, options?)`. + + @default false + */ + get isStream() { + return this._internals.isStream; + } + set isStream(value) { + assert.boolean(value); + this._internals.isStream = value; + } + /** + The parsing method. + + The promise also has `.text()`, `.json()` and `.buffer()` methods which return another Got promise for the parsed body. + + It's like setting the options to `{responseType: 'json', resolveBodyOnly: true}` but without affecting the main Got promise. + + __Note__: When using streams, this option is ignored. 
+ + @example + ``` + const responsePromise = got(url); + const bufferPromise = responsePromise.buffer(); + const jsonPromise = responsePromise.json(); + + const [response, buffer, json] = Promise.all([responsePromise, bufferPromise, jsonPromise]); + // `response` is an instance of Got Response + // `buffer` is an instance of Buffer + // `json` is an object + ``` + + @example + ``` + // This + const body = await got(url).json(); + + // is semantically the same as this + const body = await got(url, {responseType: 'json', resolveBodyOnly: true}); + ``` + */ + get responseType() { + return this._internals.responseType; + } + set responseType(value) { + if (value === undefined) { + this._internals.responseType = 'text'; + return; + } + if (value !== 'text' && value !== 'buffer' && value !== 'json') { + throw new Error(`Invalid \`responseType\` option: ${value}`); + } + this._internals.responseType = value; + } + get pagination() { + return this._internals.pagination; + } + set pagination(value) { + assert.object(value); + if (this._merging) { + Object.assign(this._internals.pagination, value); + } + else { + this._internals.pagination = value; + } + } + get auth() { + throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.'); + } + set auth(_value) { + throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.'); + } + get setHost() { + return this._internals.setHost; + } + set setHost(value) { + assert.boolean(value); + this._internals.setHost = value; + } + get maxHeaderSize() { + return this._internals.maxHeaderSize; + } + set maxHeaderSize(value) { + assert.any([dist.number, dist.undefined], value); + this._internals.maxHeaderSize = value; + } + get enableUnixSockets() { + return this._internals.enableUnixSockets; + } + set enableUnixSockets(value) { + assert.boolean(value); + this._internals.enableUnixSockets = value; + } + // eslint-disable-next-line @typescript-eslint/naming-convention + toJSON() { + return { ...this._internals }; + } + [Symbol.for('nodejs.util.inspect.custom')](_depth, options) { + return (0,external_node_util_namespaceObject.inspect)(this._internals, options); + } + createNativeRequestOptions() { + const internals = this._internals; + const url = internals.url; + let agent; + if (url.protocol === 'https:') { + agent = internals.http2 ? internals.agent : internals.agent.https; + } + else { + agent = internals.agent.http; + } + const { https } = internals; + let { pfx } = https; + if (dist.array(pfx) && dist.plainObject(pfx[0])) { + pfx = pfx.map(object => ({ + buf: object.buffer, + passphrase: object.passphrase, + })); + } + return { + ...internals.cacheOptions, + ...this._unixOptions, + // HTTPS options + // eslint-disable-next-line @typescript-eslint/naming-convention + ALPNProtocols: https.alpnProtocols, + ca: https.certificateAuthority, + cert: https.certificate, + key: https.key, + passphrase: https.passphrase, + pfx: https.pfx, + rejectUnauthorized: https.rejectUnauthorized, + checkServerIdentity: https.checkServerIdentity ?? external_node_tls_namespaceObject.checkServerIdentity, + ciphers: https.ciphers, + honorCipherOrder: https.honorCipherOrder, + minVersion: https.minVersion, + maxVersion: https.maxVersion, + sigalgs: https.signatureAlgorithms, + sessionTimeout: https.tlsSessionLifetime, + dhparam: https.dhparam, + ecdhCurve: https.ecdhCurve, + crl: https.certificateRevocationLists, + // HTTP options + lookup: internals.dnsLookup ?? 
internals.dnsCache?.lookup, + family: internals.dnsLookupIpVersion, + agent, + setHost: internals.setHost, + method: internals.method, + maxHeaderSize: internals.maxHeaderSize, + localAddress: internals.localAddress, + headers: internals.headers, + createConnection: internals.createConnection, + timeout: internals.http2 ? getHttp2TimeoutOption(internals) : undefined, + // HTTP/2 options + h2session: internals.h2session, + }; + } + getRequestFunction() { + const url = this._internals.url; + const { request } = this._internals; + if (!request && url) { + return this.getFallbackRequestFunction(); + } + return request; + } + getFallbackRequestFunction() { + const url = this._internals.url; + if (!url) { + return; + } + if (url.protocol === 'https:') { + if (this._internals.http2) { + if (major < 15 || (major === 15 && minor < 10)) { + const error = new Error('To use the `http2` option, install Node.js 15.10.0 or above'); + error.code = 'EUNSUPPORTED'; + throw error; + } + return http2_wrapper_source.auto; + } + return external_node_https_namespaceObject.request; + } + return external_node_http_namespaceObject.request; + } + freeze() { + const options = this._internals; + Object.freeze(options); + Object.freeze(options.hooks); + Object.freeze(options.hooks.afterResponse); + Object.freeze(options.hooks.beforeError); + Object.freeze(options.hooks.beforeRedirect); + Object.freeze(options.hooks.beforeRequest); + Object.freeze(options.hooks.beforeRetry); + Object.freeze(options.hooks.init); + Object.freeze(options.https); + Object.freeze(options.cacheOptions); + Object.freeze(options.agent); + Object.freeze(options.headers); + Object.freeze(options.timeout); + Object.freeze(options.retry); + Object.freeze(options.retry.errorCodes); + Object.freeze(options.retry.methods); + Object.freeze(options.retry.statusCodes); + Object.freeze(options.context); + } +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/response.js + +const isResponseOk = (response) => { + const { statusCode } = response; + const limitStatusCode = response.request.options.followRedirect ? 299 : 399; + return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304; +}; +/** +An error to be thrown when server response code is 2xx, and parsing body fails. +Includes a `response` property. +*/ +class ParseError extends RequestError { + constructor(error, response) { + const { options } = response.request; + super(`${error.message} in "${options.url.toString()}"`, error, response.request); + this.name = 'ParseError'; + this.code = 'ERR_BODY_PARSE_FAILURE'; + } +} +const parseBody = (response, responseType, parseJson, encoding) => { + const { rawBody } = response; + try { + if (responseType === 'text') { + return rawBody.toString(encoding); + } + if (responseType === 'json') { + return rawBody.length === 0 ? 
'' : parseJson(rawBody.toString(encoding)); + } + if (responseType === 'buffer') { + return rawBody; + } + } + catch (error) { + throw new ParseError(error, response); + } + throw new ParseError({ + message: `Unknown body type '${responseType}'`, + name: 'Error', + }, response); +}; + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/is-client-request.js +function isClientRequest(clientRequest) { + return clientRequest.writable && !clientRequest.writableEnded; +} +/* harmony default export */ const is_client_request = (isClientRequest); + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/utils/is-unix-socket-url.js +// eslint-disable-next-line @typescript-eslint/naming-convention +function isUnixSocketURL(url) { + return url.protocol === 'unix:' || url.hostname === 'unix'; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/core/index.js + + + + + + + + + + + + + + + + + + + + + + + +const supportsBrotli = dist.string(external_node_process_namespaceObject.versions.brotli); +const methodsWithoutBody = new Set(['GET', 'HEAD']); +const cacheableStore = new WeakableMap(); +const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]); +const proxiedRequestEvents = [ + 'socket', + 'connect', + 'continue', + 'information', + 'upgrade', +]; +const core_noop = () => { }; +class Request extends external_node_stream_namespaceObject.Duplex { + constructor(url, options, defaults) { + super({ + // Don't destroy immediately, as the error may be emitted on unsuccessful retry + autoDestroy: false, + // It needs to be zero because we're just proxying the data to another stream + highWaterMark: 0, + }); + // @ts-expect-error - Ignoring for now. + Object.defineProperty(this, 'constructor', { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_noPipe", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + // @ts-expect-error https://github.com/microsoft/TypeScript/issues/9568 + Object.defineProperty(this, "options", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "response", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "requestUrl", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "redirectUrls", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "retryCount", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_stopRetry", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_downloadedSize", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_uploadedSize", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_stopReading", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_pipedServerResponses", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_request", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_responseSize", { + enumerable: true, + configurable: true, + writable: 
true, + value: void 0 + }); + Object.defineProperty(this, "_bodySize", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_unproxyEvents", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_isFromCache", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_cannotHaveBody", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_triggerRead", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_cancelTimeouts", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_nativeResponse", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_flushed", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "_aborted", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + // We need this because `this._request` if `undefined` when using cache + Object.defineProperty(this, "_requestInitialized", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + this._downloadedSize = 0; + this._uploadedSize = 0; + this._stopReading = false; + this._pipedServerResponses = new Set(); + this._cannotHaveBody = false; + this._unproxyEvents = core_noop; + this._triggerRead = false; + this._cancelTimeouts = core_noop; + this._jobs = []; + this._flushed = false; + this._requestInitialized = false; + this._aborted = false; + this.redirectUrls = []; + this.retryCount = 0; + this._stopRetry = core_noop; + this.on('pipe', source => { + if (source.headers) { + Object.assign(this.options.headers, source.headers); + } + }); + this.on('newListener', event => { + if (event === 'retry' && this.listenerCount('retry') > 0) { + throw new Error('A retry listener has been attached already.'); + } + }); + try { + this.options = new Options(url, options, defaults); + if (!this.options.url) { + if (this.options.prefixUrl === '') { + throw new TypeError('Missing `url` property'); + } + this.options.url = ''; + } + this.requestUrl = this.options.url; + } + catch (error) { + const { options } = error; + if (options) { + this.options = options; + } + this.flush = async () => { + this.flush = async () => { }; + this.destroy(error); + }; + return; + } + if (this.options.signal?.aborted) { + this.destroy(new AbortError(this)); + } + this.options.signal?.addEventListener('abort', () => { + this.destroy(new AbortError(this)); + }); + // Important! If you replace `body` in a handler with another stream, make sure it's readable first. + // The below is run only once. + const { body } = this.options; + if (dist.nodeStream(body)) { + body.once('error', error => { + if (this._flushed) { + this._beforeError(new UploadError(error, this)); + } + else { + this.flush = async () => { + this.flush = async () => { }; + this._beforeError(new UploadError(error, this)); + }; + } + }); + } + } + async flush() { + if (this._flushed) { + return; + } + this._flushed = true; + try { + await this._finalizeBody(); + if (this.destroyed) { + return; + } + await this._makeRequest(); + if (this.destroyed) { + this._request?.destroy(); + return; + } + // Queued writes etc. 
+ for (const job of this._jobs) { + job(); + } + // Prevent memory leak + this._jobs.length = 0; + this._requestInitialized = true; + } + catch (error) { + this._beforeError(error); + } + } + _beforeError(error) { + if (this._stopReading) { + return; + } + const { response, options } = this; + const attemptCount = this.retryCount + (error.name === 'RetryError' ? 0 : 1); + this._stopReading = true; + if (!(error instanceof RequestError)) { + error = new RequestError(error.message, error, this); + } + const typedError = error; + void (async () => { + // Node.js parser is really weird. + // It emits post-request Parse Errors on the same instance as previous request. WTF. + // Therefore we need to check if it has been destroyed as well. + // + // Furthermore, Node.js 16 `response.destroy()` doesn't immediately destroy the socket, + // but makes the response unreadable. So we additionally need to check `response.readable`. + if (response?.readable && !response.rawBody && !this._request?.socket?.destroyed) { + // @types/node has incorrect typings. `setEncoding` accepts `null` as well. + response.setEncoding(this.readableEncoding); + const success = await this._setRawBody(response); + if (success) { + response.body = response.rawBody.toString(); + } + } + if (this.listenerCount('retry') !== 0) { + let backoff; + try { + let retryAfter; + if (response && 'retry-after' in response.headers) { + retryAfter = Number(response.headers['retry-after']); + if (Number.isNaN(retryAfter)) { + retryAfter = Date.parse(response.headers['retry-after']) - Date.now(); + if (retryAfter <= 0) { + retryAfter = 1; + } + } + else { + retryAfter *= 1000; + } + } + const retryOptions = options.retry; + backoff = await retryOptions.calculateDelay({ + attemptCount, + retryOptions, + error: typedError, + retryAfter, + computedValue: calculate_retry_delay({ + attemptCount, + retryOptions, + error: typedError, + retryAfter, + computedValue: retryOptions.maxRetryAfter ?? options.timeout.request ?? 
Number.POSITIVE_INFINITY, + }), + }); + } + catch (error_) { + void this._error(new RequestError(error_.message, error_, this)); + return; + } + if (backoff) { + await new Promise(resolve => { + const timeout = setTimeout(resolve, backoff); + this._stopRetry = () => { + clearTimeout(timeout); + resolve(); + }; + }); + // Something forced us to abort the retry + if (this.destroyed) { + return; + } + try { + for (const hook of this.options.hooks.beforeRetry) { + // eslint-disable-next-line no-await-in-loop + await hook(typedError, this.retryCount + 1); + } + } + catch (error_) { + void this._error(new RequestError(error_.message, error, this)); + return; + } + // Something forced us to abort the retry + if (this.destroyed) { + return; + } + this.destroy(); + this.emit('retry', this.retryCount + 1, error, (updatedOptions) => { + const request = new Request(options.url, updatedOptions, options); + request.retryCount = this.retryCount + 1; + external_node_process_namespaceObject.nextTick(() => { + void request.flush(); + }); + return request; + }); + return; + } + } + void this._error(typedError); + })(); + } + _read() { + this._triggerRead = true; + const { response } = this; + if (response && !this._stopReading) { + // We cannot put this in the `if` above + // because `.read()` also triggers the `end` event + if (response.readableLength) { + this._triggerRead = false; + } + let data; + while ((data = response.read()) !== null) { + this._downloadedSize += data.length; // eslint-disable-line @typescript-eslint/restrict-plus-operands + const progress = this.downloadProgress; + if (progress.percent < 1) { + this.emit('downloadProgress', progress); + } + this.push(data); + } + } + } + _write(chunk, encoding, callback) { + const write = () => { + this._writeRequest(chunk, encoding, callback); + }; + if (this._requestInitialized) { + write(); + } + else { + this._jobs.push(write); + } + } + _final(callback) { + const endRequest = () => { + // We need to check if `this._request` is present, + // because it isn't when we use cache. + if (!this._request || this._request.destroyed) { + callback(); + return; + } + this._request.end((error) => { + // The request has been destroyed before `_final` finished. 
+ // See https://github.com/nodejs/node/issues/39356 + if (this._request._writableState?.errored) { + return; + } + if (!error) { + this._bodySize = this._uploadedSize; + this.emit('uploadProgress', this.uploadProgress); + this._request.emit('upload-complete'); + } + callback(error); + }); + }; + if (this._requestInitialized) { + endRequest(); + } + else { + this._jobs.push(endRequest); + } + } + _destroy(error, callback) { + this._stopReading = true; + this.flush = async () => { }; + // Prevent further retries + this._stopRetry(); + this._cancelTimeouts(); + if (this.options) { + const { body } = this.options; + if (dist.nodeStream(body)) { + body.destroy(); + } + } + if (this._request) { + this._request.destroy(); + } + if (error !== null && !dist.undefined(error) && !(error instanceof RequestError)) { + error = new RequestError(error.message, error, this); + } + callback(error); + } + pipe(destination, options) { + if (destination instanceof external_node_http_namespaceObject.ServerResponse) { + this._pipedServerResponses.add(destination); + } + return super.pipe(destination, options); + } + unpipe(destination) { + if (destination instanceof external_node_http_namespaceObject.ServerResponse) { + this._pipedServerResponses.delete(destination); + } + super.unpipe(destination); + return this; + } + async _finalizeBody() { + const { options } = this; + const { headers } = options; + const isForm = !dist.undefined(options.form); + // eslint-disable-next-line @typescript-eslint/naming-convention + const isJSON = !dist.undefined(options.json); + const isBody = !dist.undefined(options.body); + const cannotHaveBody = methodsWithoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody); + this._cannotHaveBody = cannotHaveBody; + if (isForm || isJSON || isBody) { + if (cannotHaveBody) { + throw new TypeError(`The \`${options.method}\` method cannot be used with a body`); + } + // Serialize body + const noContentType = !dist.string(headers['content-type']); + if (isBody) { + // Body is spec-compliant FormData + if (isFormData(options.body)) { + const encoder = new FormDataEncoder(options.body); + if (noContentType) { + headers['content-type'] = encoder.headers['Content-Type']; + } + headers['content-length'] = encoder.headers['Content-Length']; + options.body = encoder.encode(); + } + // Special case for https://github.com/form-data/form-data + if (is_form_data_isFormData(options.body) && noContentType) { + headers['content-type'] = `multipart/form-data; boundary=${options.body.getBoundary()}`; + } + } + else if (isForm) { + if (noContentType) { + headers['content-type'] = 'application/x-www-form-urlencoded'; + } + const { form } = options; + options.form = undefined; + options.body = (new external_node_url_namespaceObject.URLSearchParams(form)).toString(); + } + else { + if (noContentType) { + headers['content-type'] = 'application/json'; + } + const { json } = options; + options.json = undefined; + options.body = options.stringifyJson(json); + } + const uploadBodySize = await getBodySize(options.body, options.headers); + // See https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. For example, a Content-Length header + // field is normally sent in a POST request even when the value is 0 + // (indicating an empty payload body). 
A user agent SHOULD NOT send a + // Content-Length header field when the request message does not contain + // a payload body and the method semantics do not anticipate such a + // body. + if (dist.undefined(headers['content-length']) && dist.undefined(headers['transfer-encoding']) && !cannotHaveBody && !dist.undefined(uploadBodySize)) { + headers['content-length'] = String(uploadBodySize); + } + } + if (options.responseType === 'json' && !('accept' in options.headers)) { + options.headers.accept = 'application/json'; + } + this._bodySize = Number(headers['content-length']) || undefined; + } + async _onResponseBase(response) { + // This will be called e.g. when using cache so we need to check if this request has been aborted. + if (this.isAborted) { + return; + } + const { options } = this; + const { url } = options; + this._nativeResponse = response; + if (options.decompress) { + response = decompress_response(response); + } + const statusCode = response.statusCode; + const typedResponse = response; + typedResponse.statusMessage = typedResponse.statusMessage ? typedResponse.statusMessage : external_node_http_namespaceObject.STATUS_CODES[statusCode]; + typedResponse.url = options.url.toString(); + typedResponse.requestUrl = this.requestUrl; + typedResponse.redirectUrls = this.redirectUrls; + typedResponse.request = this; + typedResponse.isFromCache = this._nativeResponse.fromCache ?? false; + typedResponse.ip = this.ip; + typedResponse.retryCount = this.retryCount; + typedResponse.ok = isResponseOk(typedResponse); + this._isFromCache = typedResponse.isFromCache; + this._responseSize = Number(response.headers['content-length']) || undefined; + this.response = typedResponse; + response.once('end', () => { + this._responseSize = this._downloadedSize; + this.emit('downloadProgress', this.downloadProgress); + }); + response.once('error', (error) => { + this._aborted = true; + // Force clean-up, because some packages don't do this. + // TODO: Fix decompress-response + response.destroy(); + this._beforeError(new ReadError(error, this)); + }); + response.once('aborted', () => { + this._aborted = true; + this._beforeError(new ReadError({ + name: 'Error', + message: 'The server aborted pending request', + code: 'ECONNRESET', + }, this)); + }); + this.emit('downloadProgress', this.downloadProgress); + const rawCookies = response.headers['set-cookie']; + if (dist.object(options.cookieJar) && rawCookies) { + let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString())); + if (options.ignoreInvalidCookies) { + promises = promises.map(async (promise) => { + try { + await promise; + } + catch { } + }); + } + try { + await Promise.all(promises); + } + catch (error) { + this._beforeError(error); + return; + } + } + // The above is running a promise, therefore we need to check if this request has been aborted yet again. + if (this.isAborted) { + return; + } + if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) { + // We're being redirected, we don't care about the response. + // It'd be best to abort the request, but we can't because + // we would have to sacrifice the TCP connection. We don't want that. 
+ response.resume(); + this._cancelTimeouts(); + this._unproxyEvents(); + if (this.redirectUrls.length >= options.maxRedirects) { + this._beforeError(new MaxRedirectsError(this)); + return; + } + this._request = undefined; + const updatedOptions = new Options(undefined, undefined, this.options); + const shouldBeGet = statusCode === 303 && updatedOptions.method !== 'GET' && updatedOptions.method !== 'HEAD'; + if (shouldBeGet || updatedOptions.methodRewriting) { + // Server responded with "see other", indicating that the resource exists at another location, + // and the client should request it from that location via GET or HEAD. + updatedOptions.method = 'GET'; + updatedOptions.body = undefined; + updatedOptions.json = undefined; + updatedOptions.form = undefined; + delete updatedOptions.headers['content-length']; + } + try { + // We need this in order to support UTF-8 + const redirectBuffer = external_node_buffer_namespaceObject.Buffer.from(response.headers.location, 'binary').toString(); + const redirectUrl = new external_node_url_namespaceObject.URL(redirectBuffer, url); + if (!isUnixSocketURL(url) && isUnixSocketURL(redirectUrl)) { + this._beforeError(new RequestError('Cannot redirect to UNIX socket', {}, this)); + return; + } + // Redirecting to a different site, clear sensitive data. + if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) { + if ('host' in updatedOptions.headers) { + delete updatedOptions.headers.host; + } + if ('cookie' in updatedOptions.headers) { + delete updatedOptions.headers.cookie; + } + if ('authorization' in updatedOptions.headers) { + delete updatedOptions.headers.authorization; + } + if (updatedOptions.username || updatedOptions.password) { + updatedOptions.username = ''; + updatedOptions.password = ''; + } + } + else { + redirectUrl.username = updatedOptions.username; + redirectUrl.password = updatedOptions.password; + } + this.redirectUrls.push(redirectUrl); + updatedOptions.prefixUrl = ''; + updatedOptions.url = redirectUrl; + for (const hook of updatedOptions.hooks.beforeRedirect) { + // eslint-disable-next-line no-await-in-loop + await hook(updatedOptions, typedResponse); + } + this.emit('redirect', updatedOptions, typedResponse); + this.options = updatedOptions; + await this._makeRequest(); + } + catch (error) { + this._beforeError(error); + return; + } + return; + } + if (options.isStream && options.throwHttpErrors && !isResponseOk(typedResponse)) { + this._beforeError(new HTTPError(typedResponse)); + return; + } + response.on('readable', () => { + if (this._triggerRead) { + this._read(); + } + }); + this.on('resume', () => { + response.resume(); + }); + this.on('pause', () => { + response.pause(); + }); + response.once('end', () => { + this.push(null); + }); + if (this._noPipe) { + const success = await this._setRawBody(); + if (success) { + this.emit('response', response); + } + return; + } + this.emit('response', response); + for (const destination of this._pipedServerResponses) { + if (destination.headersSent) { + continue; + } + // eslint-disable-next-line guard-for-in + for (const key in response.headers) { + const isAllowed = options.decompress ? 
key !== 'content-encoding' : true; + const value = response.headers[key]; + if (isAllowed) { + destination.setHeader(key, value); + } + } + destination.statusCode = statusCode; + } + } + async _setRawBody(from = this) { + if (from.readableEnded) { + return false; + } + try { + // Errors are emitted via the `error` event + const rawBody = await (0,get_stream.buffer)(from); + // On retry Request is destroyed with no error, therefore the above will successfully resolve. + // So in order to check if this was really successfull, we need to check if it has been properly ended. + if (!this.isAborted) { + this.response.rawBody = rawBody; + return true; + } + } + catch { } + return false; + } + async _onResponse(response) { + try { + await this._onResponseBase(response); + } + catch (error) { + /* istanbul ignore next: better safe than sorry */ + this._beforeError(error); + } + } + _onRequest(request) { + const { options } = this; + const { timeout, url } = options; + dist_source(request); + if (this.options.http2) { + // Unset stream timeout, as the `timeout` option was used only for connection timeout. + request.setTimeout(0); + } + this._cancelTimeouts = timedOut(request, timeout, url); + const responseEventName = options.cache ? 'cacheableResponse' : 'response'; + request.once(responseEventName, (response) => { + void this._onResponse(response); + }); + request.once('error', (error) => { + this._aborted = true; + // Force clean-up, because some packages (e.g. nock) don't do this. + request.destroy(); + error = error instanceof timed_out_TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this); + this._beforeError(error); + }); + this._unproxyEvents = proxyEvents(request, this, proxiedRequestEvents); + this._request = request; + this.emit('uploadProgress', this.uploadProgress); + this._sendBody(); + this.emit('request', request); + } + async _asyncWrite(chunk) { + return new Promise((resolve, reject) => { + super.write(chunk, error => { + if (error) { + reject(error); + return; + } + resolve(); + }); + }); + } + _sendBody() { + // Send body + const { body } = this.options; + const currentRequest = this.redirectUrls.length === 0 ? this : this._request ?? this; + if (dist.nodeStream(body)) { + body.pipe(currentRequest); + } + else if (dist.generator(body) || dist.asyncGenerator(body)) { + (async () => { + try { + for await (const chunk of body) { + await this._asyncWrite(chunk); + } + super.end(); + } + catch (error) { + this._beforeError(error); + } + })(); + } + else if (!dist.undefined(body)) { + this._writeRequest(body, undefined, () => { }); + currentRequest.end(); + } + else if (this._cannotHaveBody || this._noPipe) { + currentRequest.end(); + } + } + _prepareCache(cache) { + if (!cacheableStore.has(cache)) { + cacheableStore.set(cache, new src(((requestOptions, handler) => { + const result = requestOptions._request(requestOptions, handler); + // TODO: remove this when `cacheable-request` supports async request functions. + if (dist.promise(result)) { + // We only need to implement the error handler in order to support HTTP2 caching. + // The result will be a promise anyway. 
+ // @ts-expect-error ignore + // eslint-disable-next-line @typescript-eslint/promise-function-async + result.once = (event, handler) => { + if (event === 'error') { + (async () => { + try { + await result; + } + catch (error) { + handler(error); + } + })(); + } + else if (event === 'abort') { + // The empty catch is needed here in case when + // it rejects before it's `await`ed in `_makeRequest`. + (async () => { + try { + const request = (await result); + request.once('abort', handler); + } + catch { } + })(); + } + else { + /* istanbul ignore next: safety check */ + throw new Error(`Unknown HTTP2 promise event: ${event}`); + } + return result; + }; + } + return result; + }), cache)); + } + } + async _createCacheableRequest(url, options) { + return new Promise((resolve, reject) => { + // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed + Object.assign(options, urlToOptions(url)); + let request; + // TODO: Fix `cacheable-response`. This is ugly. + const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => { + response._readableState.autoDestroy = false; + if (request) { + const fix = () => { + if (response.req) { + response.complete = response.req.res.complete; + } + }; + response.prependOnceListener('end', fix); + fix(); + (await request).emit('cacheableResponse', response); + } + resolve(response); + }); + cacheRequest.once('error', reject); + cacheRequest.once('request', async (requestOrPromise) => { + request = requestOrPromise; + resolve(request); + }); + }); + } + async _makeRequest() { + const { options } = this; + const { headers, username, password } = options; + const cookieJar = options.cookieJar; + for (const key in headers) { + if (dist.undefined(headers[key])) { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete headers[key]; + } + else if (dist.null_(headers[key])) { + throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`); + } + } + if (options.decompress && dist.undefined(headers['accept-encoding'])) { + headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate'; + } + if (username || password) { + const credentials = external_node_buffer_namespaceObject.Buffer.from(`${username}:${password}`).toString('base64'); + headers.authorization = `Basic ${credentials}`; + } + // Set cookies + if (cookieJar) { + const cookieString = await cookieJar.getCookieString(options.url.toString()); + if (dist.nonEmptyString(cookieString)) { + headers.cookie = cookieString; + } + } + // Reset `prefixUrl` + options.prefixUrl = ''; + let request; + for (const hook of options.hooks.beforeRequest) { + // eslint-disable-next-line no-await-in-loop + const result = await hook(options); + if (!dist.undefined(result)) { + // @ts-expect-error Skip the type mismatch to support abstract responses + request = () => result; + break; + } + } + if (!request) { + request = options.getRequestFunction(); + } + const url = options.url; + this._requestOptions = options.createNativeRequestOptions(); + if (options.cache) { + this._requestOptions._request = request; + this._requestOptions.cache = options.cache; + this._prepareCache(options.cache); + } + // Cache support + const fn = options.cache ? this._createCacheableRequest : request; + try { + // We can't do `await fn(...)`, + // because stream `error` event can be emitted before `Promise.resolve()`. 
+ let requestOrResponse = fn(url, this._requestOptions); + if (dist.promise(requestOrResponse)) { + requestOrResponse = await requestOrResponse; + } + // Fallback + if (dist.undefined(requestOrResponse)) { + requestOrResponse = options.getFallbackRequestFunction()(url, this._requestOptions); + if (dist.promise(requestOrResponse)) { + requestOrResponse = await requestOrResponse; + } + } + if (is_client_request(requestOrResponse)) { + this._onRequest(requestOrResponse); + } + else if (this.writable) { + this.once('finish', () => { + void this._onResponse(requestOrResponse); + }); + this._sendBody(); + } + else { + void this._onResponse(requestOrResponse); + } + } + catch (error) { + if (error instanceof src.CacheError) { + throw new CacheError(error, this); + } + throw error; + } + } + async _error(error) { + try { + for (const hook of this.options.hooks.beforeError) { + // eslint-disable-next-line no-await-in-loop + error = await hook(error); + } + } + catch (error_) { + error = new RequestError(error_.message, error_, this); + } + this.destroy(error); + } + _writeRequest(chunk, encoding, callback) { + if (!this._request || this._request.destroyed) { + // Probably the `ClientRequest` instance will throw + return; + } + this._request.write(chunk, encoding, (error) => { + if (!error) { + this._uploadedSize += external_node_buffer_namespaceObject.Buffer.byteLength(chunk, encoding); + const progress = this.uploadProgress; + if (progress.percent < 1) { + this.emit('uploadProgress', progress); + } + } + callback(error); + }); + } + /** + The remote IP address. + */ + get ip() { + return this.socket?.remoteAddress; + } + /** + Indicates whether the request has been aborted or not. + */ + get isAborted() { + return this._aborted; + } + get socket() { + return this._request?.socket ?? undefined; + } + /** + Progress event for downloading (receiving a response). + */ + get downloadProgress() { + let percent; + if (this._responseSize) { + percent = this._downloadedSize / this._responseSize; + } + else if (this._responseSize === this._downloadedSize) { + percent = 1; + } + else { + percent = 0; + } + return { + percent, + transferred: this._downloadedSize, + total: this._responseSize, + }; + } + /** + Progress event for uploading (sending a request). + */ + get uploadProgress() { + let percent; + if (this._bodySize) { + percent = this._uploadedSize / this._bodySize; + } + else if (this._bodySize === this._uploadedSize) { + percent = 1; + } + else { + percent = 0; + } + return { + percent, + transferred: this._uploadedSize, + total: this._bodySize, + }; + } + /** + The object contains the following properties: + + - `start` - Time when the request started. + - `socket` - Time when a socket was assigned to the request. + - `lookup` - Time when the DNS lookup finished. + - `connect` - Time when the socket successfully connected. + - `secureConnect` - Time when the socket securely connected. + - `upload` - Time when the request finished uploading. + - `response` - Time when the request fired `response` event. + - `end` - Time when the response fired `end` event. + - `error` - Time when the request fired `error` event. + - `abort` - Time when the request fired `abort` event. 
+ - `phases` + - `wait` - `timings.socket - timings.start` + - `dns` - `timings.lookup - timings.socket` + - `tcp` - `timings.connect - timings.lookup` + - `tls` - `timings.secureConnect - timings.connect` + - `request` - `timings.upload - (timings.secureConnect || timings.connect)` + - `firstByte` - `timings.response - timings.upload` + - `download` - `timings.end - timings.response` + - `total` - `(timings.end || timings.error || timings.abort) - timings.start` + + If something has not been measured yet, it will be `undefined`. + + __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch. + */ + get timings() { + return this._request?.timings; + } + /** + Whether the response was retrieved from the cache. + */ + get isFromCache() { + return this._isFromCache; + } + get reusedSocket() { + return this._request?.reusedSocket; + } +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/as-promise/types.js + +/** +An error to be thrown when the request is aborted with `.cancel()`. +*/ +class types_CancelError extends RequestError { + constructor(request) { + super('Promise was canceled', {}, request); + this.name = 'CancelError'; + this.code = 'ERR_CANCELED'; + } + /** + Whether the promise is canceled. + */ + get isCanceled() { + return true; + } +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/as-promise/index.js + + + + + + + + +const as_promise_proxiedRequestEvents = [ + 'request', + 'response', + 'redirect', + 'uploadProgress', + 'downloadProgress', +]; +function asPromise(firstRequest) { + let globalRequest; + let globalResponse; + let normalizedOptions; + const emitter = new external_node_events_namespaceObject.EventEmitter(); + const promise = new PCancelable((resolve, reject, onCancel) => { + onCancel(() => { + globalRequest.destroy(); + }); + onCancel.shouldReject = false; + onCancel(() => { + reject(new types_CancelError(globalRequest)); + }); + const makeRequest = (retryCount) => { + // Errors when a new request is made after the promise settles. + // Used to detect a race condition. + // See https://github.com/sindresorhus/got/issues/1489 + onCancel(() => { }); + const request = firstRequest ?? new Request(undefined, undefined, normalizedOptions); + request.retryCount = retryCount; + request._noPipe = true; + globalRequest = request; + request.once('response', async (response) => { + // Parse body + const contentEncoding = (response.headers['content-encoding'] ?? '').toLowerCase(); + const isCompressed = contentEncoding === 'gzip' || contentEncoding === 'deflate' || contentEncoding === 'br'; + const { options } = request; + if (isCompressed && !options.decompress) { + response.body = response.rawBody; + } + else { + try { + response.body = parseBody(response, options.responseType, options.parseJson, options.encoding); + } + catch (error) { + // Fall back to `utf8` + response.body = response.rawBody.toString(); + if (isResponseOk(response)) { + request._beforeError(error); + return; + } + } + } + try { + const hooks = options.hooks.afterResponse; + for (const [index, hook] of hooks.entries()) { + // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise + // eslint-disable-next-line no-await-in-loop + response = await hook(response, async (updatedOptions) => { + options.merge(updatedOptions); + options.prefixUrl = ''; + if (updatedOptions.url) { + options.url = updatedOptions.url; + } + // Remove any further hooks for that request, because we'll call them anyway. + // The loop continues. 
We don't want duplicates (asPromise recursion). + options.hooks.afterResponse = options.hooks.afterResponse.slice(0, index); + throw new RetryError(request); + }); + if (!(dist.object(response) && dist.number(response.statusCode) && !dist.nullOrUndefined(response.body))) { + throw new TypeError('The `afterResponse` hook returned an invalid value'); + } + } + } + catch (error) { + request._beforeError(error); + return; + } + globalResponse = response; + if (!isResponseOk(response)) { + request._beforeError(new HTTPError(response)); + return; + } + request.destroy(); + resolve(request.options.resolveBodyOnly ? response.body : response); + }); + const onError = (error) => { + if (promise.isCanceled) { + return; + } + const { options } = request; + if (error instanceof HTTPError && !options.throwHttpErrors) { + const { response } = error; + request.destroy(); + resolve(request.options.resolveBodyOnly ? response.body : response); + return; + } + reject(error); + }; + request.once('error', onError); + const previousBody = request.options?.body; + request.once('retry', (newRetryCount, error) => { + firstRequest = undefined; + const newBody = request.options.body; + if (previousBody === newBody && dist.nodeStream(newBody)) { + error.message = 'Cannot retry with consumed body stream'; + onError(error); + return; + } + // This is needed! We need to reuse `request.options` because they can get modified! + // For example, by calling `promise.json()`. + normalizedOptions = request.options; + makeRequest(newRetryCount); + }); + proxyEvents(request, emitter, as_promise_proxiedRequestEvents); + if (dist.undefined(firstRequest)) { + void request.flush(); + } + }; + makeRequest(0); + }); + promise.on = (event, fn) => { + emitter.on(event, fn); + return promise; + }; + promise.off = (event, fn) => { + emitter.off(event, fn); + return promise; + }; + const shortcut = (responseType) => { + const newPromise = (async () => { + // Wait until downloading has ended + await promise; + const { options } = globalResponse.request; + return parseBody(globalResponse, responseType, options.parseJson, options.encoding); + })(); + // eslint-disable-next-line @typescript-eslint/no-floating-promises + Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise)); + return newPromise; + }; + promise.json = () => { + if (globalRequest.options) { + const { headers } = globalRequest.options; + if (!globalRequest.writableFinished && !('accept' in headers)) { + headers.accept = 'application/json'; + } + } + return shortcut('json'); + }; + promise.buffer = () => shortcut('buffer'); + promise.text = () => shortcut('text'); + return promise; +} + +;// CONCATENATED MODULE: ./node_modules/got/dist/source/create.js + + + + +// The `delay` package weighs 10KB (!) 
+const delay = async (ms) => new Promise(resolve => {
+    setTimeout(resolve, ms);
+});
+const isGotInstance = (value) => dist.function_(value);
+const aliases = [
+    'get',
+    'post',
+    'put',
+    'patch',
+    'head',
+    'delete',
+];
+const create = (defaults) => {
+    defaults = {
+        options: new Options(undefined, undefined, defaults.options),
+        handlers: [...defaults.handlers],
+        mutableDefaults: defaults.mutableDefaults,
+    };
+    Object.defineProperty(defaults, 'mutableDefaults', {
+        enumerable: true,
+        configurable: false,
+        writable: false,
+    });
+    // Got interface
+    const got = ((url, options, defaultOptions = defaults.options) => {
+        const request = new Request(url, options, defaultOptions);
+        let promise;
+        const lastHandler = (normalized) => {
+            // Note: `options` is `undefined` when `new Options(...)` fails
+            request.options = normalized;
+            request._noPipe = !normalized.isStream;
+            void request.flush();
+            if (normalized.isStream) {
+                return request;
+            }
+            if (!promise) {
+                promise = asPromise(request);
+            }
+            return promise;
+        };
+        let iteration = 0;
+        const iterateHandlers = (newOptions) => {
+            const handler = defaults.handlers[iteration++] ?? lastHandler;
+            const result = handler(newOptions, iterateHandlers);
+            if (dist.promise(result) && !request.options.isStream) {
+                if (!promise) {
+                    promise = asPromise(request);
+                }
+                if (result !== promise) {
+                    const descriptors = Object.getOwnPropertyDescriptors(promise);
+                    for (const key in descriptors) {
+                        if (key in result) {
+                            // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+                            delete descriptors[key];
+                        }
+                    }
+                    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+                    Object.defineProperties(result, descriptors);
+                    result.cancel = promise.cancel;
+                }
+            }
+            return result;
+        };
+        return iterateHandlers(request.options);
+    });
+    got.extend = (...instancesOrOptions) => {
+        const options = new Options(undefined, undefined, defaults.options);
+        const handlers = [...defaults.handlers];
+        let mutableDefaults;
+        for (const value of instancesOrOptions) {
+            if (isGotInstance(value)) {
+                options.merge(value.defaults.options);
+                handlers.push(...value.defaults.handlers);
+                mutableDefaults = value.defaults.mutableDefaults;
+            }
+            else {
+                options.merge(value);
+                if (value.handlers) {
+                    handlers.push(...value.handlers);
+                }
+                mutableDefaults = value.mutableDefaults;
+            }
+        }
+        return create({
+            options,
+            handlers,
+            mutableDefaults: Boolean(mutableDefaults),
+        });
+    };
+    // Pagination
+    const paginateEach = (async function* (url, options) {
+        let normalizedOptions = new Options(url, options, defaults.options);
+        normalizedOptions.resolveBodyOnly = false;
+        const { pagination } = normalizedOptions;
+        assert.function_(pagination.transform);
+        assert.function_(pagination.shouldContinue);
+        assert.function_(pagination.filter);
+        assert.function_(pagination.paginate);
+        assert.number(pagination.countLimit);
+        assert.number(pagination.requestLimit);
+        assert.number(pagination.backoff);
+        const allItems = [];
+        let { countLimit } = pagination;
+        let numberOfRequests = 0;
+        while (numberOfRequests < pagination.requestLimit) {
+            if (numberOfRequests !== 0) {
+                // eslint-disable-next-line no-await-in-loop
+                await delay(pagination.backoff);
+            }
+            // eslint-disable-next-line no-await-in-loop
+            const response = (await got(undefined, undefined, normalizedOptions));
+            // eslint-disable-next-line no-await-in-loop
+            const parsed = await pagination.transform(response);
+            const currentItems = [];
+            assert.array(parsed);
+            for (const item of parsed) {
+                if (pagination.filter({ item, currentItems, allItems })) {
+                    if (!pagination.shouldContinue({ item, currentItems, allItems })) {
+                        return;
+                    }
+                    yield item;
+                    if (pagination.stackAllItems) {
+                        allItems.push(item);
+                    }
+                    currentItems.push(item);
+                    if (--countLimit <= 0) {
+                        return;
+                    }
+                }
+            }
+            const optionsToMerge = pagination.paginate({
+                response,
+                currentItems,
+                allItems,
+            });
+            if (optionsToMerge === false) {
+                return;
+            }
+            if (optionsToMerge === response.request.options) {
+                normalizedOptions = response.request.options;
+            }
+            else {
+                normalizedOptions.merge(optionsToMerge);
+                assert.any([dist.urlInstance, dist.undefined], optionsToMerge.url);
+                if (optionsToMerge.url !== undefined) {
+                    normalizedOptions.prefixUrl = '';
+                    normalizedOptions.url = optionsToMerge.url;
+                }
+            }
+            numberOfRequests++;
+        }
+    });
+    got.paginate = paginateEach;
+    got.paginate.all = (async (url, options) => {
+        const results = [];
+        for await (const item of paginateEach(url, options)) {
+            results.push(item);
+        }
+        return results;
+    });
+    // For those who like very descriptive names
+    got.paginate.each = paginateEach;
+    // Stream API
+    got.stream = ((url, options) => got(url, { ...options, isStream: true }));
+    // Shortcuts
+    for (const method of aliases) {
+        got[method] = ((url, options) => got(url, { ...options, method }));
+        got.stream[method] = ((url, options) => got(url, { ...options, method, isStream: true }));
+    }
+    if (!defaults.mutableDefaults) {
+        Object.freeze(defaults.handlers);
+        defaults.options.freeze();
+    }
+    Object.defineProperty(got, 'defaults', {
+        value: defaults,
+        writable: false,
+        configurable: false,
+        enumerable: true,
+    });
+    return got;
+};
+/* harmony default export */ const source_create = (create);
+
+;// CONCATENATED MODULE: ./node_modules/got/dist/source/index.js
+
+
+const defaults = {
+    options: new Options(),
+    handlers: [],
+    mutableDefaults: false,
+};
+const got = source_create(defaults);
+/* harmony default export */ const got_dist_source = (got);
+
+
+
+
+
+
+
+
+
+
+
+
+
+/***/ }),
+
+/***/ 2020:
+/***/ ((module) => {
+
+"use strict";
+module.exports = 
JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[
296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"ma
pped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"va
lid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[102
7,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapp
ed",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[13
87]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2
208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787
],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,33
31],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[39
55,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallo
wed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"va
lid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]]
,[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"vali
d"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7
987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,95
3]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped
",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528]
,"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331
],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]
],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[1
1244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],
[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20
031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"ma
pped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,
12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,1
2673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12
844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",
[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapp
ed",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",
[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50
]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904
,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped"
,[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[4
2930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"m
apped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"
mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907
,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016
,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped
",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"
mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped"
,[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped"
,[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1
603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804]
,"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,158
0,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,
65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[6
5273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"m
apped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[6
5490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]
],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,6
8747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowe
d"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disa
llowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119
857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,11
9965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[
[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],
"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"ma
pped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mappe
d",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped"
,[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120
623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]]
,[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125
124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapp
ed",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],
"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[
127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513
,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",
[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[19471
0,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,19481
1],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915
],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016]
,"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429]
,"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]'); /***/ }) @@ -32651,8 +39753,9 @@ module.exports = require("zlib");; /******/ // The require function /******/ function __nccwpck_require__(moduleId) { /******/ // Check if module is in cache -/******/ if(__webpack_module_cache__[moduleId]) { -/******/ return __webpack_module_cache__[moduleId].exports; +/******/ var cachedModule = __webpack_module_cache__[moduleId]; +/******/ if (cachedModule !== undefined) { +/******/ return cachedModule.exports; /******/ } /******/ // Create a new module (and put it into the cache) /******/ var module = __webpack_module_cache__[moduleId] = { @@ -32675,13 +39778,46 @@ module.exports = require("zlib");; /******/ } /******/ /************************************************************************/ +/******/ /* webpack/runtime/define property getters */ +/******/ (() => { +/******/ // define getter functions for harmony exports +/******/ __nccwpck_require__.d = (exports, definition) => { +/******/ for(var key in definition) { +/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { +/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); +/******/ } +/******/ } +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/hasOwnProperty shorthand */ +/******/ (() => { +/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) +/******/ })(); +/******/ +/******/ /* webpack/runtime/make namespace object */ +/******/ (() => { +/******/ // define __esModule on exports +/******/ __nccwpck_require__.r = (exports) => { +/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ })(); +/******/ /******/ /* webpack/runtime/compat */ /******/ -/******/ __nccwpck_require__.ab = __dirname + "/";/************************************************************************/ -/******/ // module exports must be returned from runtime so entry inlining is disabled +/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; +/******/ +/************************************************************************/ +/******/ /******/ // startup /******/ // Load entry module and return exports -/******/ return __nccwpck_require__(3109); +/******/ // This entry module is referenced by other modules so it can't be inlined +/******/ var __webpack_exports__ = __nccwpck_require__(3109); +/******/ module.exports = __webpack_exports__; +/******/ /******/ })() ; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/index.js.map b/dist/index.js.map index 09aac5d..5867385 100644 --- a/dist/index.js.map +++ b/dist/index.js.map @@ -1 +1 @@ 
-{"version":3,"file":"index.js","sources":["../webpack://test-check/./lib/input-providers/artifact-provider.js","../webpack://test-check/./lib/input-providers/local-file-provider.js","../webpack://test-check/./lib/main.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-parser.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-types.js","../webpack://test-check/./lib/parsers/dotnet-trx/dotnet-trx-parser.js","../webpack://test-check/./lib/parsers/java-junit/java-junit-parser.js","../webpack://test-check/./lib/parsers/jest-junit/jest-junit-parser.js","../webpack://test-check/./lib/parsers/mocha-json/mocha-json-parser.js","../webpack://test-check/./lib/report/get-annotations.js","../webpack://test-check/./lib/report/get-report.js","../webpack://test-check/./lib/test-results.js","../webpack://test-check/./lib/utils/exec.js","../webpack://test-check/./lib/utils/git.js","../webpack://test-check/./lib/utils/github-utils.js","../webpack://test-check/./lib/utils/markdown-utils.js","../webpack://test-check/./lib/utils/node-utils.js","../webpack://test-check/./lib/utils/parse-utils.js","../webpack://test-check/./lib/utils/path-utils.js","../webpack://test-check/./lib/utils/slugger.js","../webpack://test-check/./node_modules/@actions/core/lib/command.js","../webpack://test-check/./node_modules/@actions/core/lib/core.js","../webpack://test-check/./node_modules/@actions/core/lib/file-command.js","../webpack://test-check/./node_modules/@actions/core/lib/utils.js","../webpack://test-check/./node_modules/@actions/exec/lib/exec.js","../webpack://test-check/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://test-check/./node_modules/@actions/github/lib/context.js","../webpack://test-check/./node_modules/@actions/github/lib/github.js","../webpack://test-check/./node_modules/@actions/github/lib/internal/utils.js","../webpack://test-check/./node_modules/@actions/github/lib/utils.js","../webpack://test-check/./node_modules/@actions/http-client/index.js","../webpack://test-check/./node_modules/@actions/http-client/proxy.js","../webpack://test-check/./node_modules/@actions/io/lib/io-util.js","../webpack://test-check/./node_modules/@actions/io/lib/io.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/constants.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/stream.js","../webpack://test-
check/./node_modules/@nodelib/fs.walk/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/reader.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/settings.js","../webpack://test-check/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/core/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@sindresorhus/is/dist/index.js","../webpack://test-check/./node_modules/@szmarczak/http-timer/dist/source/index.js","../webpack://test-check/./node_modules/adm-zip/adm-zip.js","../webpack://test-check/./node_modules/adm-zip/headers/entryHeader.js","../webpack://test-check/./node_modules/adm-zip/headers/index.js","../webpack://test-check/./node_modules/adm-zip/headers/mainHeader.js","../webpack://test-check/./node_modules/adm-zip/methods/deflater.js","../webpack://test-check/./node_modules/adm-zip/methods/index.js","../webpack://test-check/./node_modules/adm-zip/methods/inflater.js","../webpack://test-check/./node_modules/adm-zip/methods/zipcrypto.js","../webpack://test-check/./node_modules/adm-zip/util/constants.js","../webpack://test-check/./node_modules/adm-zip/util/errors.js","../webpack://test-check/./node_modules/adm-zip/util/fattr.js","../webpack://test-check/./node_modules/adm-zip/util/fileSystem.js","../webpack://test-check/./node_modules/adm-zip/util/index.js","../webpack://test-check/./node_modules/adm-zip/util/utils.js","../webpack://test-check/./node_modules/adm-zip/zipEntry.js","../webpack://test-check/./node_modules/adm-zip/zipFile.js","../webpack://test-check/./node_modules/before-after-hook/index.js","../webpack://test-check/./node_modules/before-after-hook/lib/add.js","../webpack://test-check/./node_modules/before-after-hook/lib/register.js","../webpack://test-check/./node_modules/before-after-hook/lib/remove.js","../webpack://test-check/./node_modules/cacheable-lookup/source/index.js","../webpack://test-check/./node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js","../webpack://test-check/./node_modules/cacheable-request/node_modules/get-stream/index.js","../webpack://test-check/./node_modules/cacheable-request/src/index.js","../webpack://test-check/./node_modules/clone-response/src/index.js","../webpack://test-check/./node_modules/decompress-response/index.js","../webpack://test-check/./node_modules/decompress-response/node_modules/mimic-response/index.js","../webpack://test-check/./node_modules/defer-to-connect/dist/source/index.js","../webpack://test-check/./node_modules/deprecation/dist-node/index.js","..
/webpack://test-check/./node_modules/end-of-stream/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/compile.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/constants.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/expand.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/parse.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/stringify.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/utils.js","../webpack://test-check/./node_modules/fast-glob/node_modules/fill-range/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/is-number/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/micromatch/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/to-regex-range/index.js","../webpack://test-check/./node_modules/fast-glob/out/index.js","../webpack://test-check/./node_modules/fast-glob/out/managers/tasks.js","../webpack://test-check/./node_modules/fast-glob/out/providers/async.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/deep.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/entry.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/error.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/matcher.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/partial.js","../webpack://test-check/./node_modules/fast-glob/out/providers/provider.js","../webpack://test-check/./node_modules/fast-glob/out/providers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/providers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/providers/transformers/entry.js","../webpack://test-check/./node_modules/fast-glob/out/readers/reader.js","../webpack://test-check/./node_modules/fast-glob/out/readers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/readers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/settings.js","../webpack://test-check/./node_modules/fast-glob/out/utils/array.js","../webpack://test-check/./node_modules/fast-glob/out/utils/errno.js","../webpack://test-check/./node_modules/fast-glob/out/utils/fs.js","../webpack://test-check/./node_modules/fast-glob/out/utils/index.js","../webpack://test-check/./node_modules/fast-glob/out/utils/path.js","../webpack://test-check/./node_modules/fast-glob/out/utils/pattern.js","../webpack://test-check/./node_modules/fast-glob/out/utils/stream.js","../webpack://test-check/./node_modules/fast-glob/out/utils/string.js","../webpack://test-check/./node_modules/fastq/queue.js","../webpack://test-check/./node_modules/glob-parent/index.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/create-rejection.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/index.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/normalize-arguments.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/parse-body.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/types.js","../webpack://test-check/./node_modules/got/dist/source/core/calculate-retry-delay.js","../webpack://test-check/./node_modules/got/dist/source/core/index.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/dns-ip-version.js","
../webpack://test-check/./node_modules/got/dist/source/core/utils/get-body-size.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/get-buffer.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/is-form-data.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/is-response-ok.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/options-to-url.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/proxy-events.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/timed-out.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/unhandle.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/url-to-options.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/weakable-map.js","../webpack://test-check/./node_modules/got/dist/source/create.js","../webpack://test-check/./node_modules/got/dist/source/index.js","../webpack://test-check/./node_modules/got/dist/source/types.js","../webpack://test-check/./node_modules/got/dist/source/utils/deep-freeze.js","../webpack://test-check/./node_modules/got/dist/source/utils/deprecation-warning.js","../webpack://test-check/./node_modules/http-cache-semantics/index.js","../webpack://test-check/./node_modules/http2-wrapper/source/agent.js","../webpack://test-check/./node_modules/http2-wrapper/source/auto.js","../webpack://test-check/./node_modules/http2-wrapper/source/client-request.js","../webpack://test-check/./node_modules/http2-wrapper/source/incoming-message.js","../webpack://test-check/./node_modules/http2-wrapper/source/index.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/calculate-server-name.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/errors.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/is-request-pseudo-header.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/proxy-events.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/url-to-options.js","../webpack://test-check/./node_modules/is-extglob/index.js","../webpack://test-check/./node_modules/is-glob/index.js","../webpack://test-check/./node_modules/json-buffer/index.js","../webpack://test-check/./node_modules/keyv/src/index.js","../webpack://test-check/./node_modules/lowercase-keys/index.js","../webpack://test-check/./node_modules/merge2/index.js","../webpack://test-check/./node_modules/mimic-response/index.js","../webpack://test-check/./node_modules/node-fetch/lib/index.js","../webpack://test-check/./node_modules/normalize-url/index.js","../webpack://test-check/./node_modules/once/once.js","../webpack://test-check/./node_modules/p-cancelable/index.js","../webpack://test-check/./node_modules/picomatch/index.js","../webpack://test-check/./node_modules/picomatch/lib/constants.js","../webpack://test-check/./node_modules/picomatch/lib/parse.js","../webpack://test-check/./node_modules/picomatch/lib/picomatch.js","../webpack://test-check/./node_modules/picomatch/lib/scan.js","../webpack://test-check/./node_modules/picomatch/lib/utils.js","../webpack://test-check/./node_modules/pump/index.js","../webpack://test-check/./node_modules/quick-lru/index.js","../webpack://test-check/./node_modules/resolve-alpn/index.js","../webpack://test-check/./node_modules/responselike/src/index.js","../webpack://test-check/./node_modules/reusify/reusify.js","../webpack://test-check/./node_modules/run-parallel/index.js","../webpack://test-check/./node_modules/sax/l
ib/sax.js","../webpack://test-check/./node_modules/tunnel/index.js","../webpack://test-check/./node_modules/tunnel/lib/tunnel.js","../webpack://test-check/./node_modules/universal-user-agent/dist-node/index.js","../webpack://test-check/./node_modules/wrappy/wrappy.js","../webpack://test-check/./node_modules/xml2js/lib/bom.js","../webpack://test-check/./node_modules/xml2js/lib/builder.js","../webpack://test-check/./node_modules/xml2js/lib/defaults.js","../webpack://test-check/./node_modules/xml2js/lib/parser.js","../webpack://test-check/./node_modules/xml2js/lib/processors.js","../webpack://test-check/./node_modules/xml2js/lib/xml2js.js","../webpack://test-check/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://test-check/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/Utility.js","../webpack://test-check/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocumentCB.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://test-check/./node_modules/xmlbuilder/lib/index.js","../webpack://test-check/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://test-check/external \"assert\"","../webpack://test-check/external \"buffer\"","../webpack://test-check/external \"child_process\"","../webpack://test-check/external \"dns\"","../webpack://test-check/external \"events\"","../webpack://test-check/external \"fs\"","../webpack://test-check/external \"http\"","../webpack://test-check/external \"http2\"","../webpack://test-check/external 
\"https\"","../webpack://test-check/external \"net\"","../webpack://test-check/external \"os\"","../webpack://test-check/external \"path\"","../webpack://test-check/external \"stream\"","../webpack://test-check/external \"string_decoder\"","../webpack://test-check/external \"timers\"","../webpack://test-check/external \"tls\"","../webpack://test-check/external \"url\"","../webpack://test-check/external \"util\"","../webpack://test-check/external \"zlib\"","../webpack://test-check/webpack/bootstrap","../webpack://test-check/webpack/runtime/compat","../webpack://test-check/webpack/startup"],"sourcesContent":["\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.ArtifactProvider = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst adm_zip_1 = __importDefault(require(\"adm-zip\"));\r\nconst picomatch_1 = __importDefault(require(\"picomatch\"));\r\nconst github_utils_1 = require(\"../utils/github-utils\");\r\nclass ArtifactProvider {\r\n constructor(octokit, artifact, name, pattern, sha, runId, token) {\r\n this.octokit = octokit;\r\n this.artifact = artifact;\r\n this.name = name;\r\n this.pattern = pattern;\r\n this.sha = sha;\r\n this.runId = runId;\r\n this.token = token;\r\n if (this.artifact.startsWith('/')) {\r\n const endIndex = this.artifact.lastIndexOf('/');\r\n const rePattern = this.artifact.substring(1, endIndex);\r\n const reOpts = this.artifact.substring(endIndex + 1);\r\n const re = new RegExp(rePattern, reOpts);\r\n this.artifactNameMatch = (str) => re.test(str);\r\n this.getReportName = (str) => {\r\n const match = str.match(re);\r\n if (match === null) {\r\n throw new Error(`Artifact name '${str}' does not match regex ${this.artifact}`);\r\n }\r\n let reportName = this.name;\r\n for (let i = 1; i < match.length; i++) {\r\n reportName = reportName.replace(new RegExp(`\\\\$${i}`, 'g'), match[i]);\r\n }\r\n return reportName;\r\n };\r\n }\r\n else {\r\n this.artifactNameMatch = (str) => str === this.artifact;\r\n this.getReportName = () => this.name;\r\n }\r\n this.fileNameMatch = picomatch_1.default(pattern);\r\n }\r\n async load() {\r\n const result = {};\r\n const resp = await this.octokit.actions.listWorkflowRunArtifacts({\r\n ...github.context.repo,\r\n run_id: this.runId\r\n });\r\n if (resp.data.artifacts.length === 0) {\r\n core.warning(`No artifacts found in run ${this.runId}`);\r\n return {};\r\n }\r\n const artifacts = 
resp.data.artifacts.filter(a => this.artifactNameMatch(a.name));\r\n if (artifacts.length === 0) {\r\n core.warning(`No artifact matches ${this.artifact}`);\r\n return {};\r\n }\r\n for (const art of artifacts) {\r\n const fileName = `${art.name}.zip`;\r\n await github_utils_1.downloadArtifact(this.octokit, art.id, fileName, this.token);\r\n core.startGroup(`Reading archive ${fileName}`);\r\n try {\r\n const reportName = this.getReportName(art.name);\r\n core.info(`Report name: ${reportName}`);\r\n const files = [];\r\n const zip = new adm_zip_1.default(fileName);\r\n for (const entry of zip.getEntries()) {\r\n const file = entry.entryName;\r\n if (entry.isDirectory) {\r\n core.info(`Skipping ${file}: entry is a directory`);\r\n continue;\r\n }\r\n if (!this.fileNameMatch(file)) {\r\n core.info(`Skipping ${file}: filename does not match pattern`);\r\n continue;\r\n }\r\n const content = zip.readAsText(entry);\r\n files.push({ file, content });\r\n core.info(`Read ${file}: ${content.length} chars`);\r\n }\r\n if (result[reportName]) {\r\n result[reportName].push(...files);\r\n }\r\n else {\r\n result[reportName] = files;\r\n }\r\n }\r\n finally {\r\n core.endGroup();\r\n }\r\n }\r\n return result;\r\n }\r\n async listTrackedFiles() {\r\n return github_utils_1.listFiles(this.octokit, this.sha);\r\n }\r\n}\r\nexports.ArtifactProvider = ArtifactProvider;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.LocalFileProvider = void 0;\r\nconst fs = __importStar(require(\"fs\"));\r\nconst fast_glob_1 = __importDefault(require(\"fast-glob\"));\r\nconst git_1 = require(\"../utils/git\");\r\nclass LocalFileProvider {\r\n constructor(name, pattern) {\r\n this.name = name;\r\n this.pattern = pattern;\r\n }\r\n async load() {\r\n const result = [];\r\n for (const pat of this.pattern) {\r\n const paths = await fast_glob_1.default(pat, { dot: true });\r\n for (const file of paths) {\r\n const content = await fs.promises.readFile(file, { encoding: 'utf8' });\r\n result.push({ file, content });\r\n }\r\n }\r\n return { [this.name]: result };\r\n }\r\n async listTrackedFiles() {\r\n return git_1.listFiles();\r\n }\r\n}\r\nexports.LocalFileProvider = LocalFileProvider;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst artifact_provider_1 = require(\"./input-providers/artifact-provider\");\r\nconst local_file_provider_1 = require(\"./input-providers/local-file-provider\");\r\nconst get_annotations_1 = require(\"./report/get-annotations\");\r\nconst get_report_1 = require(\"./report/get-report\");\r\nconst dart_json_parser_1 = require(\"./parsers/dart-json/dart-json-parser\");\r\nconst dotnet_trx_parser_1 = require(\"./parsers/dotnet-trx/dotnet-trx-parser\");\r\nconst java_junit_parser_1 = require(\"./parsers/java-junit/java-junit-parser\");\r\nconst jest_junit_parser_1 = require(\"./parsers/jest-junit/jest-junit-parser\");\r\nconst mocha_json_parser_1 = require(\"./parsers/mocha-json/mocha-json-parser\");\r\nconst path_utils_1 = require(\"./utils/path-utils\");\r\nconst github_utils_1 = require(\"./utils/github-utils\");\r\nconst markdown_utils_1 = require(\"./utils/markdown-utils\");\r\nasync function main() {\r\n try {\r\n const testReporter = new TestReporter();\r\n await testReporter.run();\r\n }\r\n catch (error) {\r\n core.setFailed(error.message);\r\n }\r\n}\r\nclass TestReporter {\r\n constructor() {\r\n this.artifact = core.getInput('artifact', { required: false });\r\n this.name = core.getInput('name', { required: true });\r\n this.path = core.getInput('path', { required: true });\r\n this.pathReplaceBackslashes = core.getInput('path-replace-backslashes', { required: false }) === 'true';\r\n this.reporter = core.getInput('reporter', { required: true });\r\n this.listSuites = core.getInput('list-suites', { required: true });\r\n this.listTests = core.getInput('list-tests', { required: true });\r\n this.maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));\r\n this.failOnError = core.getInput('fail-on-error', { required: true }) === 'true';\r\n this.workDirInput = core.getInput('working-directory', { required: false });\r\n this.onlySummary = core.getInput('only-summary', { required: false }) === 'true';\r\n this.token = core.getInput('token', { required: true });\r\n this.context = github_utils_1.getCheckRunContext();\r\n this.octokit = github.getOctokit(this.token);\r\n if (this.listSuites !== 'all' && this.listSuites !== 'failed') {\r\n core.setFailed(`Input parameter 'list-suites' has invalid value`);\r\n return;\r\n }\r\n if (this.listTests !== 'all' && this.listTests !== 'failed' && this.listTests !== 'none') {\r\n core.setFailed(`Input parameter 'list-tests' has invalid value`);\r\n return;\r\n }\r\n if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || 
this.maxAnnotations > 50) {\r\n core.setFailed(`Input parameter 'max-annotations' has invalid value`);\r\n return;\r\n }\r\n }\r\n async run() {\r\n if (this.workDirInput) {\r\n core.info(`Changing directory to '${this.workDirInput}'`);\r\n process.chdir(this.workDirInput);\r\n }\r\n core.info(`Check runs will be created with SHA=${this.context.sha}`);\r\n // Split path pattern by ',' and optionally convert all backslashes to forward slashes\r\n // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators\r\n const pathsList = this.path.split(',');\r\n const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList;\r\n const inputProvider = this.artifact\r\n ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token)\r\n : new local_file_provider_1.LocalFileProvider(this.name, pattern);\r\n const parseErrors = this.maxAnnotations > 0;\r\n const trackedFiles = await inputProvider.listTrackedFiles();\r\n const workDir = this.artifact ? undefined : path_utils_1.normalizeDirPath(process.cwd(), true);\r\n core.info(`Found ${trackedFiles.length} files tracked by GitHub`);\r\n const options = {\r\n workDir,\r\n trackedFiles,\r\n parseErrors\r\n };\r\n core.info(`Using test report parser '${this.reporter}'`);\r\n const parser = this.getParser(this.reporter, options);\r\n const results = [];\r\n const input = await inputProvider.load();\r\n for (const [reportName, files] of Object.entries(input)) {\r\n try {\r\n core.startGroup(`Creating test report ${reportName}`);\r\n const tr = await this.createReport(parser, reportName, files);\r\n results.push(...tr);\r\n }\r\n finally {\r\n core.endGroup();\r\n }\r\n }\r\n const isFailed = results.some(tr => tr.result === 'failed');\r\n const conclusion = isFailed ? 
'failure' : 'success';\r\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\r\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\r\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\r\n const time = results.reduce((sum, tr) => sum + tr.time, 0);\r\n core.setOutput('conclusion', conclusion);\r\n core.setOutput('passed', passed);\r\n core.setOutput('failed', failed);\r\n core.setOutput('skipped', skipped);\r\n core.setOutput('time', time);\r\n if (this.failOnError && isFailed) {\r\n core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`);\r\n return;\r\n }\r\n if (results.length === 0) {\r\n core.setFailed(`No test report files were found`);\r\n return;\r\n }\r\n }\r\n async createReport(parser, name, files) {\r\n if (files.length === 0) {\r\n core.warning(`No file matches path ${this.path}`);\r\n return [];\r\n }\r\n const results = [];\r\n for (const { file, content } of files) {\r\n core.info(`Processing test results from ${file}`);\r\n const tr = await parser.parse(file, content);\r\n results.push(tr);\r\n }\r\n core.info(`Creating check run ${name}`);\r\n const createResp = await this.octokit.checks.create({\r\n head_sha: this.context.sha,\r\n name,\r\n status: 'in_progress',\r\n output: {\r\n title: name,\r\n summary: ''\r\n },\r\n ...github.context.repo\r\n });\r\n core.info('Creating report summary');\r\n const { listSuites, listTests, onlySummary } = this;\r\n const baseUrl = createResp.data.html_url;\r\n const summary = get_report_1.getReport(results, { listSuites, listTests, baseUrl, onlySummary });\r\n core.info('Creating annotations');\r\n const annotations = get_annotations_1.getAnnotations(results, this.maxAnnotations);\r\n const isFailed = results.some(tr => tr.result === 'failed');\r\n const conclusion = isFailed ? 'failure' : 'success';\r\n const icon = isFailed ? 
markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success;\r\n core.info(`Updating check run conclusion (${conclusion}) and output`);\r\n const resp = await this.octokit.checks.update({\r\n check_run_id: createResp.data.id,\r\n conclusion,\r\n status: 'completed',\r\n output: {\r\n title: `${name} ${icon}`,\r\n summary,\r\n annotations\r\n },\r\n ...github.context.repo\r\n });\r\n core.info(`Check run create response: ${resp.status}`);\r\n core.info(`Check run URL: ${resp.data.url}`);\r\n core.info(`Check run HTML: ${resp.data.html_url}`);\r\n return results;\r\n }\r\n getParser(reporter, options) {\r\n switch (reporter) {\r\n case 'dart-json':\r\n return new dart_json_parser_1.DartJsonParser(options, 'dart');\r\n case 'dotnet-trx':\r\n return new dotnet_trx_parser_1.DotnetTrxParser(options);\r\n case 'flutter-json':\r\n return new dart_json_parser_1.DartJsonParser(options, 'flutter');\r\n case 'java-junit':\r\n return new java_junit_parser_1.JavaJunitParser(options);\r\n case 'jest-junit':\r\n return new jest_junit_parser_1.JestJunitParser(options);\r\n case 'mocha-json':\r\n return new mocha_json_parser_1.MochaJsonParser(options);\r\n default:\r\n throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);\r\n }\r\n }\r\n}\r\nmain();\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DartJsonParser = void 0;\r\nconst path_utils_1 = require(\"../../utils/path-utils\");\r\nconst dart_json_types_1 = require(\"./dart-json-types\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass TestRun {\r\n constructor(path, suites, success, time) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.success = success;\r\n this.time = time;\r\n }\r\n}\r\nclass TestSuite {\r\n constructor(suite) {\r\n this.suite = suite;\r\n this.groups = {};\r\n }\r\n}\r\nclass TestGroup {\r\n constructor(group) {\r\n this.group = group;\r\n this.tests = [];\r\n }\r\n}\r\nclass TestCase {\r\n constructor(testStart) {\r\n this.testStart = testStart;\r\n this.print = [];\r\n this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1];\r\n }\r\n get result() {\r\n var _a, _b, _c, _d;\r\n if ((_a = this.testDone) === null || _a === void 0 ? void 0 : _a.skipped) {\r\n return 'skipped';\r\n }\r\n if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') {\r\n return 'success';\r\n }\r\n if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') {\r\n return 'failed';\r\n }\r\n return undefined;\r\n }\r\n get time() {\r\n return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;\r\n }\r\n}\r\nclass DartJsonParser {\r\n constructor(options, sdk) {\r\n this.options = options;\r\n this.sdk = sdk;\r\n }\r\n async parse(path, content) {\r\n const tr = this.getTestRun(path, content);\r\n const result = this.getTestRunResult(tr);\r\n return Promise.resolve(result);\r\n }\r\n getTestRun(path, content) {\r\n const lines = content.split(/\\n\\r?/g);\r\n const events = lines\r\n .map((str, i) => {\r\n if (str.trim() === '') {\r\n return null;\r\n }\r\n try {\r\n return JSON.parse(str);\r\n }\r\n catch (e) {\r\n const col = e.columnNumber !== undefined ? 
`:${e.columnNumber}` : '';\r\n throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\\n\\n${e}`);\r\n }\r\n })\r\n .filter(evt => evt != null);\r\n let success = false;\r\n let totalTime = 0;\r\n const suites = {};\r\n const tests = {};\r\n for (const evt of events) {\r\n if (dart_json_types_1.isSuiteEvent(evt)) {\r\n suites[evt.suite.id] = new TestSuite(evt.suite);\r\n }\r\n else if (dart_json_types_1.isGroupEvent(evt)) {\r\n suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group);\r\n }\r\n else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) {\r\n const test = new TestCase(evt);\r\n const suite = suites[evt.test.suiteID];\r\n const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]];\r\n group.tests.push(test);\r\n tests[evt.test.id] = test;\r\n }\r\n else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden && tests[evt.testID]) {\r\n tests[evt.testID].testDone = evt;\r\n }\r\n else if (dart_json_types_1.isErrorEvent(evt) && tests[evt.testID]) {\r\n tests[evt.testID].error = evt;\r\n }\r\n else if (dart_json_types_1.isMessageEvent(evt) && tests[evt.testID]) {\r\n tests[evt.testID].print.push(evt);\r\n }\r\n else if (dart_json_types_1.isDoneEvent(evt)) {\r\n success = evt.success;\r\n totalTime = evt.time;\r\n }\r\n }\r\n return new TestRun(path, Object.values(suites), success, totalTime);\r\n }\r\n getTestRunResult(tr) {\r\n const suites = tr.suites.map(s => {\r\n return new test_results_1.TestSuiteResult(this.getRelativePath(s.suite.path), this.getGroups(s));\r\n });\r\n return new test_results_1.TestRunResult(tr.path, suites, tr.time);\r\n }\r\n getGroups(suite) {\r\n const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);\r\n groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); });\r\n return groups.map(group => {\r\n group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? _b : 0); });\r\n const tests = group.tests.map(tc => {\r\n const error = this.getError(suite, tc);\r\n const testName = group.group.name !== undefined && tc.testStart.test.name.startsWith(group.group.name)\r\n ? tc.testStart.test.name.slice(group.group.name.length).trim()\r\n : tc.testStart.test.name.trim();\r\n return new test_results_1.TestCaseResult(testName, tc.result, tc.time, error);\r\n });\r\n return new test_results_1.TestGroupResult(group.group.name, tests);\r\n });\r\n }\r\n getError(testSuite, test) {\r\n var _a, _b, _c, _d, _e, _f;\r\n if (!this.options.parseErrors || !test.error) {\r\n return undefined;\r\n }\r\n const { trackedFiles } = this.options;\r\n const stackTrace = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.stackTrace) !== null && _b !== void 0 ? _b : '';\r\n const print = test.print\r\n .filter(p => p.messageType === 'print')\r\n .map(p => p.message)\r\n .join('\\n');\r\n const details = [print, stackTrace].filter(str => str !== '').join('\\n');\r\n const src = this.exceptionThrowSource(details, trackedFiles);\r\n const message = this.getErrorMessage((_d = (_c = test.error) === null || _c === void 0 ? void 0 : _c.error) !== null && _d !== void 0 ? 
_d : '', print);\r\n let path;\r\n let line;\r\n if (src !== undefined) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n else {\r\n const testStartPath = this.getRelativePath(testSuite.suite.path);\r\n if (trackedFiles.includes(testStartPath)) {\r\n path = testStartPath;\r\n line = (_f = (_e = test.testStart.test.root_line) !== null && _e !== void 0 ? _e : test.testStart.test.line) !== null && _f !== void 0 ? _f : undefined;\r\n }\r\n }\r\n return {\r\n path,\r\n line,\r\n message,\r\n details\r\n };\r\n }\r\n getErrorMessage(message, print) {\r\n if (this.sdk === 'flutter') {\r\n const uselessMessageRe = /^Test failed\\. See exception logs above\\.\\nThe test description was:/m;\r\n const flutterPrintRe = /^══╡ EXCEPTION CAUGHT BY FLUTTER TEST FRAMEWORK ╞═+\\s+(.*)\\s+When the exception was thrown, this was the stack:/ms;\r\n if (uselessMessageRe.test(message)) {\r\n const match = print.match(flutterPrintRe);\r\n if (match !== null) {\r\n return match[1];\r\n }\r\n }\r\n }\r\n return message || print;\r\n }\r\n exceptionThrowSource(ex, trackedFiles) {\r\n const lines = ex.split(/\\r?\\n/g);\r\n // regexp to extract file path and line number from stack trace\r\n const dartRe = /^(?!package:)(.*)\\s+(\\d+):\\d+\\s+/;\r\n const flutterRe = /^#\\d+\\s+.*\\((?!package:)(.*):(\\d+):\\d+\\)$/;\r\n const re = this.sdk === 'dart' ? dartRe : flutterRe;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, pathStr, lineStr] = match;\r\n const path = path_utils_1.normalizeFilePath(this.getRelativePath(pathStr));\r\n if (trackedFiles.includes(path)) {\r\n const line = parseInt(lineStr);\r\n return { path, line };\r\n }\r\n }\r\n }\r\n }\r\n getRelativePath(path) {\r\n const prefix = 'file://';\r\n if (path.startsWith(prefix)) {\r\n path = path.substr(prefix.length);\r\n }\r\n path = path_utils_1.normalizeFilePath(path);\r\n const workDir = this.getWorkDir(path);\r\n if (workDir !== undefined && path.startsWith(workDir)) {\r\n path = path.substr(workDir.length);\r\n }\r\n return path;\r\n }\r\n getWorkDir(path) {\r\n var _a, _b;\r\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\r\n }\r\n}\r\nexports.DartJsonParser = DartJsonParser;\r\n","\"use strict\";\r\n/// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isMessageEvent = exports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0;\r\nfunction isSuiteEvent(event) {\r\n return event.type === 'suite';\r\n}\r\nexports.isSuiteEvent = isSuiteEvent;\r\nfunction isGroupEvent(event) {\r\n return event.type === 'group';\r\n}\r\nexports.isGroupEvent = isGroupEvent;\r\nfunction isTestStartEvent(event) {\r\n return event.type === 'testStart';\r\n}\r\nexports.isTestStartEvent = isTestStartEvent;\r\nfunction isTestDoneEvent(event) {\r\n return event.type === 'testDone';\r\n}\r\nexports.isTestDoneEvent = isTestDoneEvent;\r\nfunction isErrorEvent(event) {\r\n return event.type === 'error';\r\n}\r\nexports.isErrorEvent = isErrorEvent;\r\nfunction isDoneEvent(event) {\r\n return event.type === 'done';\r\n}\r\nexports.isDoneEvent = isDoneEvent;\r\nfunction isMessageEvent(event) {\r\n return event.type === 'print';\r\n}\r\nexports.isMessageEvent = isMessageEvent;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DotnetTrxParser = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst path_utils_1 = require(\"../../utils/path-utils\");\r\nconst parse_utils_1 = require(\"../../utils/parse-utils\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass TestClass {\r\n constructor(name) {\r\n this.name = name;\r\n this.tests = [];\r\n }\r\n}\r\nclass Test {\r\n constructor(name, outcome, duration, error) {\r\n this.name = name;\r\n this.outcome = outcome;\r\n this.duration = duration;\r\n this.error = error;\r\n }\r\n get result() {\r\n switch (this.outcome) {\r\n case 'Passed':\r\n return 'success';\r\n case 'NotExecuted':\r\n return 'skipped';\r\n case 'Failed':\r\n return 'failed';\r\n }\r\n }\r\n}\r\nclass DotnetTrxParser {\r\n constructor(options) {\r\n this.options = options;\r\n }\r\n async parse(path, content) {\r\n const trx = await this.getTrxReport(path, content);\r\n const tc = this.getTestClasses(trx);\r\n const tr = this.getTestRunResult(path, trx, tc);\r\n tr.sort(true);\r\n return tr;\r\n }\r\n async getTrxReport(path, content) {\r\n try {\r\n return (await xml2js_1.parseStringPromise(content));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\r\n }\r\n }\r\n getTestClasses(trx) {\r\n if (trx.TestRun.TestDefinitions === undefined || trx.TestRun.Results === undefined) {\r\n return [];\r\n }\r\n const unitTests = {};\r\n for (const td of trx.TestRun.TestDefinitions) {\r\n for (const ut of td.UnitTest) {\r\n unitTests[ut.$.id] = ut;\r\n }\r\n }\r\n const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(result => ({\r\n result,\r\n test: unitTests[result.$.testId]\r\n }));\r\n const testClasses = {};\r\n for (const r of unitTestsResults) {\r\n const className = r.test.TestMethod[0].$.className;\r\n let tc = testClasses[className];\r\n if (tc === undefined) {\r\n tc = new TestClass(className);\r\n testClasses[tc.name] = tc;\r\n }\r\n const error = this.getErrorInfo(r.result);\r\n const durationAttr = r.result.$.duration;\r\n const duration = durationAttr ? 
parse_utils_1.parseNetDuration(durationAttr) : 0;\r\n const resultTestName = r.result.$.testName;\r\n const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.'\r\n ? resultTestName.substr(className.length + 1)\r\n : resultTestName;\r\n const test = new Test(testName, r.result.$.outcome, duration, error);\r\n tc.tests.push(test);\r\n }\r\n const result = Object.values(testClasses);\r\n return result;\r\n }\r\n getTestRunResult(path, trx, testClasses) {\r\n const times = trx.TestRun.Times[0].$;\r\n const totalTime = parse_utils_1.parseIsoDate(times.finish).getTime() - parse_utils_1.parseIsoDate(times.start).getTime();\r\n const suites = testClasses.map(testClass => {\r\n const tests = testClass.tests.map(test => {\r\n const error = this.getError(test);\r\n return new test_results_1.TestCaseResult(test.name, test.result, test.duration, error);\r\n });\r\n const group = new test_results_1.TestGroupResult(null, tests);\r\n return new test_results_1.TestSuiteResult(testClass.name, [group]);\r\n });\r\n return new test_results_1.TestRunResult(path, suites, totalTime);\r\n }\r\n getErrorInfo(testResult) {\r\n var _a;\r\n if (testResult.$.outcome !== 'Failed') {\r\n return undefined;\r\n }\r\n const output = testResult.Output;\r\n const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? output[0].ErrorInfo[0] : undefined;\r\n return error;\r\n }\r\n getError(test) {\r\n if (!this.options.parseErrors || !test.error) {\r\n return undefined;\r\n }\r\n const error = test.error;\r\n if (!Array.isArray(error.Message) ||\r\n error.Message.length === 0 ||\r\n !Array.isArray(error.StackTrace) ||\r\n error.StackTrace.length === 0) {\r\n return undefined;\r\n }\r\n const message = test.error.Message[0];\r\n const stackTrace = test.error.StackTrace[0];\r\n let path;\r\n let line;\r\n const src = this.exceptionThrowSource(stackTrace);\r\n if (src) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n return {\r\n path,\r\n line,\r\n message,\r\n details: `${message}\\n${stackTrace}`\r\n };\r\n }\r\n exceptionThrowSource(stackTrace) {\r\n const lines = stackTrace.split(/\\r*\\n/);\r\n const re = / in (.+):line (\\d+)$/;\r\n const { trackedFiles } = this.options;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = path_utils_1.normalizeFilePath(fileStr);\r\n const workDir = this.getWorkDir(filePath);\r\n if (workDir) {\r\n const file = filePath.substr(workDir.length);\r\n if (trackedFiles.includes(file)) {\r\n const line = parseInt(lineStr);\r\n return { path: file, line };\r\n }\r\n }\r\n }\r\n }\r\n }\r\n getWorkDir(path) {\r\n var _a, _b;\r\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\r\n }\r\n}\r\nexports.DotnetTrxParser = DotnetTrxParser;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.JavaJunitParser = void 0;\r\nconst path = __importStar(require(\"path\"));\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst path_utils_1 = require(\"../../utils/path-utils\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass JavaJunitParser {\r\n constructor(options) {\r\n var _a;\r\n this.options = options;\r\n // Map to efficient lookup of all paths with given file name\r\n this.trackedFiles = {};\r\n for (const filePath of options.trackedFiles) {\r\n const fileName = path.basename(filePath);\r\n const files = (_a = this.trackedFiles[fileName]) !== null && _a !== void 0 ? _a : (this.trackedFiles[fileName] = []);\r\n files.push(path_utils_1.normalizeFilePath(filePath));\r\n }\r\n }\r\n async parse(filePath, content) {\r\n const reportOrSuite = await this.getJunitReport(filePath, content);\r\n const isReport = reportOrSuite.testsuites !== undefined;\r\n // XML might contain:\r\n // - multiple suites under root node\r\n // - single as root node\r\n let ju;\r\n if (isReport) {\r\n ju = reportOrSuite;\r\n }\r\n else {\r\n // Make it behave the same way as if suite was inside root node\r\n const suite = reportOrSuite.testsuite;\r\n ju = {\r\n testsuites: {\r\n $: { time: suite.$.time },\r\n testsuite: [suite]\r\n }\r\n };\r\n }\r\n return this.getTestRunResult(filePath, ju);\r\n }\r\n async getJunitReport(filePath, content) {\r\n try {\r\n return await xml2js_1.parseStringPromise(content);\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${filePath}\\n\\n${e}`);\r\n }\r\n }\r\n getTestRunResult(filePath, junit) {\r\n var _a;\r\n const suites = junit.testsuites.testsuite === undefined\r\n ? []\r\n : junit.testsuites.testsuite.map(ts => {\r\n const name = ts.$.name.trim();\r\n const time = parseFloat(ts.$.time) * 1000;\r\n const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);\r\n return sr;\r\n });\r\n const seconds = parseFloat((_a = junit.testsuites.$) === null || _a === void 0 ? void 0 : _a.time);\r\n const time = isNaN(seconds) ? undefined : seconds * 1000;\r\n return new test_results_1.TestRunResult(filePath, suites, time);\r\n }\r\n getGroups(suite) {\r\n if (suite.testcase === undefined) {\r\n return [];\r\n }\r\n const groups = [];\r\n for (const tc of suite.testcase) {\r\n // Normally classname is same as suite name - both refer to same Java class\r\n // Therefore it doesn't make sense to process it as a group\r\n // and tests will be added to default group with empty name\r\n const className = tc.$.classname === suite.$.name ? 
'' : tc.$.classname;\r\n let grp = groups.find(g => g.name === className);\r\n if (grp === undefined) {\r\n grp = { name: className, tests: [] };\r\n groups.push(grp);\r\n }\r\n grp.tests.push(tc);\r\n }\r\n return groups.map(grp => {\r\n const tests = grp.tests.map(tc => {\r\n const name = tc.$.name.trim();\r\n const result = this.getTestCaseResult(tc);\r\n const time = parseFloat(tc.$.time) * 1000;\r\n const error = this.getTestCaseError(tc);\r\n return new test_results_1.TestCaseResult(name, result, time, error);\r\n });\r\n return new test_results_1.TestGroupResult(grp.name, tests);\r\n });\r\n }\r\n getTestCaseResult(test) {\r\n if (test.failure || test.error)\r\n return 'failed';\r\n if (test.skipped)\r\n return 'skipped';\r\n return 'success';\r\n }\r\n getTestCaseError(tc) {\r\n var _a;\r\n if (!this.options.parseErrors) {\r\n return undefined;\r\n }\r\n // We process and the same way\r\n const failures = (_a = tc.failure) !== null && _a !== void 0 ? _a : tc.error;\r\n if (!failures) {\r\n return undefined;\r\n }\r\n const failure = failures[0];\r\n const details = typeof failure === 'object' ? failure._ : failure;\r\n let filePath;\r\n let line;\r\n const src = this.exceptionThrowSource(details);\r\n if (src) {\r\n filePath = src.filePath;\r\n line = src.line;\r\n }\r\n return {\r\n path: filePath,\r\n line,\r\n details,\r\n message: typeof failure === 'object' ? failure.message : undefined\r\n };\r\n }\r\n exceptionThrowSource(stackTrace) {\r\n const lines = stackTrace.split(/\\r?\\n/);\r\n const re = /^at (.*)\\((.*):(\\d+)\\)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, tracePath, fileName, lineStr] = match;\r\n const filePath = this.getFilePath(tracePath, fileName);\r\n if (filePath !== undefined) {\r\n const line = parseInt(lineStr);\r\n return { filePath, line };\r\n }\r\n }\r\n }\r\n }\r\n // Stacktrace in Java doesn't contain full paths to source file.\r\n // There are only package, file name and line.\r\n // Assuming folder structure matches package name (as it should in Java),\r\n // we can try to match tracked file.\r\n getFilePath(tracePath, fileName) {\r\n // Check if there is any tracked file with given name\r\n const files = this.trackedFiles[fileName];\r\n if (files === undefined) {\r\n return undefined;\r\n }\r\n // Remove class name and method name from trace.\r\n // Take parts until first item with capital letter - package names are lowercase while class name is CamelCase.\r\n const packageParts = tracePath.split(/\\./g);\r\n const packageIndex = packageParts.findIndex(part => part[0] <= 'Z');\r\n if (packageIndex !== -1) {\r\n packageParts.splice(packageIndex, packageParts.length - packageIndex);\r\n }\r\n if (packageParts.length === 0) {\r\n return undefined;\r\n }\r\n // Get right file\r\n // - file name matches\r\n // - parent folders structure must reflect the package name\r\n for (const filePath of files) {\r\n const dirs = path.dirname(filePath).split(/\\//g);\r\n if (packageParts.length > dirs.length) {\r\n continue;\r\n }\r\n // get only N parent folders, where N = length of package name parts\r\n if (dirs.length > packageParts.length) {\r\n dirs.splice(0, dirs.length - packageParts.length);\r\n }\r\n // check if parent folder structure matches package name\r\n const isMatch = packageParts.every((part, i) => part === dirs[i]);\r\n if (isMatch) {\r\n return filePath;\r\n }\r\n }\r\n return undefined;\r\n }\r\n}\r\nexports.JavaJunitParser = JavaJunitParser;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.JestJunitParser = void 0;\r\nconst xml2js_1 = require(\"xml2js\");\r\nconst node_utils_1 = require(\"../../utils/node-utils\");\r\nconst path_utils_1 = require(\"../../utils/path-utils\");\r\nconst test_results_1 = require(\"../../test-results\");\r\nclass JestJunitParser {\r\n constructor(options) {\r\n this.options = options;\r\n }\r\n async parse(path, content) {\r\n const ju = await this.getJunitReport(path, content);\r\n return this.getTestRunResult(path, ju);\r\n }\r\n async getJunitReport(path, content) {\r\n try {\r\n return (await xml2js_1.parseStringPromise(content));\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\r\n }\r\n }\r\n getTestRunResult(path, junit) {\r\n const suites = junit.testsuites.testsuite === undefined\r\n ? []\r\n : junit.testsuites.testsuite.map(ts => {\r\n const name = ts.$.name.trim();\r\n const time = parseFloat(ts.$.time) * 1000;\r\n const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);\r\n return sr;\r\n });\r\n const time = parseFloat(junit.testsuites.$.time) * 1000;\r\n return new test_results_1.TestRunResult(path, suites, time);\r\n }\r\n getGroups(suite) {\r\n const groups = [];\r\n for (const tc of suite.testcase) {\r\n let grp = groups.find(g => g.describe === tc.$.classname);\r\n if (grp === undefined) {\r\n grp = { describe: tc.$.classname, tests: [] };\r\n groups.push(grp);\r\n }\r\n grp.tests.push(tc);\r\n }\r\n return groups.map(grp => {\r\n const tests = grp.tests.map(tc => {\r\n const name = tc.$.name.trim();\r\n const result = this.getTestCaseResult(tc);\r\n const time = parseFloat(tc.$.time) * 1000;\r\n const error = this.getTestCaseError(tc);\r\n return new test_results_1.TestCaseResult(name, result, time, error);\r\n });\r\n return new test_results_1.TestGroupResult(grp.describe, tests);\r\n });\r\n }\r\n getTestCaseResult(test) {\r\n if (test.failure)\r\n return 'failed';\r\n if (test.skipped)\r\n return 'skipped';\r\n return 'success';\r\n }\r\n getTestCaseError(tc) {\r\n if (!this.options.parseErrors || !tc.failure) {\r\n return undefined;\r\n }\r\n const details = tc.failure[0];\r\n let path;\r\n let line;\r\n const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file));\r\n if (src) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n return {\r\n path,\r\n line,\r\n details\r\n };\r\n }\r\n getRelativePath(path) {\r\n path = path_utils_1.normalizeFilePath(path);\r\n const workDir = this.getWorkDir(path);\r\n if (workDir !== undefined && path.startsWith(workDir)) {\r\n path = path.substr(workDir.length);\r\n }\r\n return path;\r\n }\r\n getWorkDir(path) {\r\n var _a, _b;\r\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\r\n }\r\n}\r\nexports.JestJunitParser = JestJunitParser;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.MochaJsonParser = void 0;\r\nconst test_results_1 = require(\"../../test-results\");\r\nconst node_utils_1 = require(\"../../utils/node-utils\");\r\nconst path_utils_1 = require(\"../../utils/path-utils\");\r\nclass MochaJsonParser {\r\n constructor(options) {\r\n this.options = options;\r\n }\r\n async parse(path, content) {\r\n const mocha = this.getMochaJson(path, content);\r\n const result = this.getTestRunResult(path, mocha);\r\n result.sort(true);\r\n return Promise.resolve(result);\r\n }\r\n getMochaJson(path, content) {\r\n try {\r\n return JSON.parse(content);\r\n }\r\n catch (e) {\r\n throw new Error(`Invalid JSON at ${path}\\n\\n${e}`);\r\n }\r\n }\r\n getTestRunResult(resultsPath, mocha) {\r\n const suitesMap = {};\r\n const getSuite = (test) => {\r\n var _a;\r\n const path = this.getRelativePath(test.file);\r\n return (_a = suitesMap[path]) !== null && _a !== void 0 ? _a : (suitesMap[path] = new test_results_1.TestSuiteResult(path, []));\r\n };\r\n for (const test of mocha.passes) {\r\n const suite = getSuite(test);\r\n this.processTest(suite, test, 'success');\r\n }\r\n for (const test of mocha.failures) {\r\n const suite = getSuite(test);\r\n this.processTest(suite, test, 'failed');\r\n }\r\n for (const test of mocha.pending) {\r\n const suite = getSuite(test);\r\n this.processTest(suite, test, 'skipped');\r\n }\r\n const suites = Object.values(suitesMap);\r\n return new test_results_1.TestRunResult(resultsPath, suites, mocha.stats.duration);\r\n }\r\n processTest(suite, test, result) {\r\n var _a;\r\n const groupName = test.fullTitle !== test.title\r\n ? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()\r\n : null;\r\n let group = suite.groups.find(grp => grp.name === groupName);\r\n if (group === undefined) {\r\n group = new test_results_1.TestGroupResult(groupName, []);\r\n suite.groups.push(group);\r\n }\r\n const error = this.getTestCaseError(test);\r\n const testCase = new test_results_1.TestCaseResult(test.title, result, (_a = test.duration) !== null && _a !== void 0 ? _a : 0, error);\r\n group.tests.push(testCase);\r\n }\r\n getTestCaseError(test) {\r\n const details = test.err.stack;\r\n const message = test.err.message;\r\n if (details === undefined) {\r\n return undefined;\r\n }\r\n let path;\r\n let line;\r\n const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file));\r\n if (src) {\r\n path = src.path;\r\n line = src.line;\r\n }\r\n return {\r\n path,\r\n line,\r\n message,\r\n details\r\n };\r\n }\r\n getRelativePath(path) {\r\n path = path_utils_1.normalizeFilePath(path);\r\n const workDir = this.getWorkDir(path);\r\n if (workDir !== undefined && path.startsWith(workDir)) {\r\n path = path.substr(workDir.length);\r\n }\r\n return path;\r\n }\r\n getWorkDir(path) {\r\n var _a, _b;\r\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\r\n }\r\n}\r\nexports.MochaJsonParser = MochaJsonParser;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getAnnotations = void 0;\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nconst parse_utils_1 = require(\"../utils/parse-utils\");\r\nfunction getAnnotations(results, maxCount) {\r\n var _a, _b, _c, _d;\r\n if (maxCount === 0) {\r\n return [];\r\n }\r\n // Collect errors from TestRunResults\r\n // Merge duplicates if there are more test results files processed\r\n const errors = [];\r\n const mergeDup = results.length > 1;\r\n for (const tr of results) {\r\n for (const ts of tr.suites) {\r\n for (const tg of ts.groups) {\r\n for (const tc of tg.tests) {\r\n const err = tc.error;\r\n if (err === undefined) {\r\n continue;\r\n }\r\n const path = (_a = err.path) !== null && _a !== void 0 ? _a : tr.path;\r\n const line = (_b = err.line) !== null && _b !== void 0 ? _b : 0;\r\n if (mergeDup) {\r\n const dup = errors.find(e => path === e.path && line === e.line && err.details === e.details);\r\n if (dup !== undefined) {\r\n dup.testRunPaths.push(tr.path);\r\n continue;\r\n }\r\n }\r\n errors.push({\r\n testRunPaths: [tr.path],\r\n suiteName: ts.name,\r\n testName: tg.name ? `${tg.name} ► ${tc.name}` : tc.name,\r\n details: err.details,\r\n message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : parse_utils_1.getFirstNonEmptyLine(err.details)) !== null && _d !== void 0 ? _d : 'Test failed',\r\n path,\r\n line\r\n });\r\n }\r\n }\r\n }\r\n }\r\n // Limit number of created annotations\r\n errors.splice(maxCount + 1);\r\n const annotations = errors.map(e => {\r\n const message = [\r\n 'Failed test found in:',\r\n e.testRunPaths.map(p => ` ${p}`).join('\\n'),\r\n 'Error:',\r\n ident(markdown_utils_1.fixEol(e.message), ' ')\r\n ].join('\\n');\r\n return enforceCheckRunLimits({\r\n path: e.path,\r\n start_line: e.line,\r\n end_line: e.line,\r\n annotation_level: 'failure',\r\n title: `${e.suiteName} ► ${e.testName}`,\r\n raw_details: markdown_utils_1.fixEol(e.details),\r\n message\r\n });\r\n });\r\n return annotations;\r\n}\r\nexports.getAnnotations = getAnnotations;\r\nfunction enforceCheckRunLimits(err) {\r\n err.title = markdown_utils_1.ellipsis(err.title || '', 255);\r\n err.message = markdown_utils_1.ellipsis(err.message, 65535);\r\n if (err.raw_details) {\r\n err.raw_details = markdown_utils_1.ellipsis(err.raw_details, 65535);\r\n }\r\n return err;\r\n}\r\nfunction ident(text, prefix) {\r\n return text\r\n .split(/\\n/g)\r\n .map(line => prefix + line)\r\n .join('\\n');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getReport = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\r\nconst parse_utils_1 = require(\"../utils/parse-utils\");\r\nconst slugger_1 = require(\"../utils/slugger\");\r\nconst MAX_REPORT_LENGTH = 65535;\r\nconst defaultOptions = {\r\n listSuites: 'all',\r\n listTests: 'all',\r\n baseUrl: '',\r\n onlySummary: false\r\n};\r\nfunction getReport(results, options = defaultOptions) {\r\n core.info('Generating check run summary');\r\n applySort(results);\r\n const opts = { ...options };\r\n let lines = renderReport(results, opts);\r\n let report = lines.join('\\n');\r\n if (getByteLength(report) <= MAX_REPORT_LENGTH) {\r\n return report;\r\n }\r\n if (opts.listTests === 'all') {\r\n core.info(\"Test report summary is too big - setting 'listTests' to 'failed'\");\r\n opts.listTests = 'failed';\r\n lines = renderReport(results, opts);\r\n report = lines.join('\\n');\r\n if (getByteLength(report) <= MAX_REPORT_LENGTH) {\r\n return report;\r\n }\r\n }\r\n core.warning(`Test report summary exceeded limit of ${MAX_REPORT_LENGTH} bytes and will be trimmed`);\r\n return trimReport(lines);\r\n}\r\nexports.getReport = getReport;\r\nfunction trimReport(lines) {\r\n const closingBlock = '```';\r\n const errorMsg = `**Report exceeded GitHub limit of ${MAX_REPORT_LENGTH} bytes and has been trimmed**`;\r\n const maxErrorMsgLength = closingBlock.length + errorMsg.length + 2;\r\n const maxReportLength = MAX_REPORT_LENGTH - maxErrorMsgLength;\r\n let reportLength = 0;\r\n let codeBlock = false;\r\n let endLineIndex = 0;\r\n for (endLineIndex = 0; endLineIndex < lines.length; endLineIndex++) {\r\n const line = lines[endLineIndex];\r\n const lineLength = getByteLength(line);\r\n reportLength += lineLength + 1;\r\n if (reportLength > maxReportLength) {\r\n break;\r\n }\r\n if (line === '```') {\r\n codeBlock = !codeBlock;\r\n }\r\n }\r\n const reportLines = lines.slice(0, endLineIndex);\r\n if (codeBlock) {\r\n reportLines.push('```');\r\n }\r\n reportLines.push(errorMsg);\r\n return reportLines.join('\\n');\r\n}\r\nfunction applySort(results) {\r\n results.sort((a, b) => a.path.localeCompare(b.path));\r\n for (const res of results) {\r\n res.suites.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n}\r\nfunction getByteLength(text) {\r\n return Buffer.byteLength(text, 'utf8');\r\n}\r\nfunction renderReport(results, options) {\r\n const sections = [];\r\n const badge = getReportBadge(results);\r\n sections.push(badge);\r\n const runs = getTestRunsReport(results, options);\r\n sections.push(...runs);\r\n return sections;\r\n}\r\nfunction getReportBadge(results) {\r\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\r\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\r\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\r\n return getBadge(passed, failed, skipped);\r\n}\r\nfunction getBadge(passed, failed, skipped) {\r\n 
const text = [];\r\n if (passed > 0) {\r\n text.push(`${passed} passed`);\r\n }\r\n if (failed > 0) {\r\n text.push(`${failed} failed`);\r\n }\r\n if (skipped > 0) {\r\n text.push(`${skipped} skipped`);\r\n }\r\n const message = text.length > 0 ? text.join(', ') : 'none';\r\n let color = 'success';\r\n if (failed > 0) {\r\n color = 'critical';\r\n }\r\n else if (passed === 0 && failed === 0) {\r\n color = 'yellow';\r\n }\r\n const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully';\r\n const uri = encodeURIComponent(`tests-${message}-${color}`);\r\n return `![${hint}](https://img.shields.io/badge/${uri})`;\r\n}\r\nfunction getTestRunsReport(testRuns, options) {\r\n const sections = [];\r\n if (testRuns.length > 1 || options.onlySummary) {\r\n const tableData = testRuns.map((tr, runIndex) => {\r\n const time = markdown_utils_1.formatTime(tr.time);\r\n const name = tr.path;\r\n const addr = options.baseUrl + makeRunSlug(runIndex).link;\r\n const nameLink = markdown_utils_1.link(name, addr);\r\n const passed = tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : '';\r\n const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : '';\r\n const skipped = tr.skipped > 0 ? `${tr.skipped}${markdown_utils_1.Icon.skip}` : '';\r\n return [nameLink, passed, failed, skipped, time];\r\n });\r\n const resultsTable = markdown_utils_1.table(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData);\r\n sections.push(resultsTable);\r\n }\r\n if (options.onlySummary === false) {\r\n const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();\r\n sections.push(...suitesReports);\r\n }\r\n return sections;\r\n}\r\nfunction getSuitesReport(tr, runIndex, options) {\r\n const sections = [];\r\n const trSlug = makeRunSlug(runIndex);\r\n const nameLink = `${tr.path}`;\r\n const icon = getResultIcon(tr.result);\r\n sections.push(`## ${icon}\\xa0${nameLink}`);\r\n const time = markdown_utils_1.formatTime(tr.time);\r\n const headingLine2 = tr.tests > 0\r\n ? `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.`\r\n : 'No tests found';\r\n sections.push(headingLine2);\r\n const suites = options.listSuites === 'failed' ? tr.failedSuites : tr.suites;\r\n if (suites.length > 0) {\r\n const suitesTable = markdown_utils_1.table(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => {\r\n const tsTime = markdown_utils_1.formatTime(s.time);\r\n const tsName = s.name;\r\n const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed');\r\n const tsAddr = options.baseUrl + makeSuiteSlug(runIndex, suiteIndex).link;\r\n const tsNameLink = skipLink ? tsName : markdown_utils_1.link(tsName, tsAddr);\r\n const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : '';\r\n const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : '';\r\n const skipped = s.skipped > 0 ? 
`${s.skipped}${markdown_utils_1.Icon.skip}` : '';\r\n return [tsNameLink, passed, failed, skipped, tsTime];\r\n }));\r\n sections.push(suitesTable);\r\n }\r\n if (options.listTests !== 'none') {\r\n const tests = suites.map((ts, suiteIndex) => getTestsReport(ts, runIndex, suiteIndex, options)).flat();\r\n if (tests.length > 1) {\r\n sections.push(...tests);\r\n }\r\n }\r\n return sections;\r\n}\r\nfunction getTestsReport(ts, runIndex, suiteIndex, options) {\r\n var _a, _b, _c;\r\n if (options.listTests === 'failed' && ts.result !== 'failed') {\r\n return [];\r\n }\r\n const groups = ts.groups;\r\n if (groups.length === 0) {\r\n return [];\r\n }\r\n const sections = [];\r\n const tsName = ts.name;\r\n const tsSlug = makeSuiteSlug(runIndex, suiteIndex);\r\n const tsNameLink = `${tsName}`;\r\n const icon = getResultIcon(ts.result);\r\n sections.push(`### ${icon}\\xa0${tsNameLink}`);\r\n sections.push('```');\r\n for (const grp of groups) {\r\n if (grp.name) {\r\n sections.push(grp.name);\r\n }\r\n const space = grp.name ? ' ' : '';\r\n for (const tc of grp.tests) {\r\n const result = getResultIcon(tc.result);\r\n sections.push(`${space}${result} ${tc.name}`);\r\n if (tc.error) {\r\n const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = parse_utils_1.getFirstNonEmptyLine(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? void 0 : _c.split(/\\r?\\n/g).map(l => '\\t' + l);\r\n if (lines) {\r\n sections.push(...lines);\r\n }\r\n }\r\n }\r\n }\r\n sections.push('```');\r\n return sections;\r\n}\r\nfunction makeRunSlug(runIndex) {\r\n // use prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`r${runIndex}`);\r\n}\r\nfunction makeSuiteSlug(runIndex, suiteIndex) {\r\n // use prefix to avoid slug conflicts after escaping the paths\r\n return slugger_1.slug(`r${runIndex}s${suiteIndex}`);\r\n}\r\nfunction getResultIcon(result) {\r\n switch (result) {\r\n case 'success':\r\n return markdown_utils_1.Icon.success;\r\n case 'skipped':\r\n return markdown_utils_1.Icon.skip;\r\n case 'failed':\r\n return markdown_utils_1.Icon.fail;\r\n default:\r\n return '';\r\n }\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;\r\nclass TestRunResult {\r\n constructor(path, suites, totalTime) {\r\n this.path = path;\r\n this.suites = suites;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.suites.reduce((sum, g) => sum + g.tests, 0);\r\n }\r\n get passed() {\r\n return this.suites.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.suites.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.suites.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.suites.some(t => t.result === 'failed') ? 
'failed' : 'success';\r\n }\r\n get failedSuites() {\r\n return this.suites.filter(s => s.result === 'failed');\r\n }\r\n sort(deep) {\r\n this.suites.sort((a, b) => a.name.localeCompare(b.name));\r\n if (deep) {\r\n for (const suite of this.suites) {\r\n suite.sort(deep);\r\n }\r\n }\r\n }\r\n}\r\nexports.TestRunResult = TestRunResult;\r\nclass TestSuiteResult {\r\n constructor(name, groups, totalTime) {\r\n this.name = name;\r\n this.groups = groups;\r\n this.totalTime = totalTime;\r\n }\r\n get tests() {\r\n return this.groups.reduce((sum, g) => sum + g.tests.length, 0);\r\n }\r\n get passed() {\r\n return this.groups.reduce((sum, g) => sum + g.passed, 0);\r\n }\r\n get failed() {\r\n return this.groups.reduce((sum, g) => sum + g.failed, 0);\r\n }\r\n get skipped() {\r\n return this.groups.reduce((sum, g) => sum + g.skipped, 0);\r\n }\r\n get time() {\r\n var _a;\r\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0);\r\n }\r\n get result() {\r\n return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n get failedGroups() {\r\n return this.groups.filter(grp => grp.result === 'failed');\r\n }\r\n sort(deep) {\r\n this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : ''); });\r\n if (deep) {\r\n for (const grp of this.groups) {\r\n grp.sort();\r\n }\r\n }\r\n }\r\n}\r\nexports.TestSuiteResult = TestSuiteResult;\r\nclass TestGroupResult {\r\n constructor(name, tests) {\r\n this.name = name;\r\n this.tests = tests;\r\n }\r\n get passed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0);\r\n }\r\n get failed() {\r\n return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0);\r\n }\r\n get skipped() {\r\n return this.tests.reduce((sum, t) => (t.result === 'skipped' ? sum + 1 : sum), 0);\r\n }\r\n get time() {\r\n return this.tests.reduce((sum, t) => sum + t.time, 0);\r\n }\r\n get result() {\r\n return this.tests.some(t => t.result === 'failed') ? 'failed' : 'success';\r\n }\r\n get failedTests() {\r\n return this.tests.filter(tc => tc.result === 'failed');\r\n }\r\n sort() {\r\n this.tests.sort((a, b) => a.name.localeCompare(b.name));\r\n }\r\n}\r\nexports.TestGroupResult = TestGroupResult;\r\nclass TestCaseResult {\r\n constructor(name, result, time, error) {\r\n this.name = name;\r\n this.result = result;\r\n this.time = time;\r\n this.error = error;\r\n }\r\n}\r\nexports.TestCaseResult = TestCaseResult;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst exec_1 = require(\"@actions/exec\");\r\n// Wraps original exec() function\r\n// Returns exit code and whole stdout/stderr\r\nasync function exec(commandLine, args, options) {\r\n options = options || {};\r\n let stdout = '';\r\n let stderr = '';\r\n options.listeners = {\r\n stdout: (data) => (stdout += data.toString()),\r\n stderr: (data) => (stderr += data.toString())\r\n };\r\n const code = await exec_1.exec(commandLine, args, options);\r\n return { code, stdout, stderr };\r\n}\r\nexports.default = exec;\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.listFiles = void 0;\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst exec_1 = __importDefault(require(\"./exec\"));\r\nasync function listFiles() {\r\n core.startGroup('Listing all files tracked by git');\r\n let output = '';\r\n try {\r\n output = (await exec_1.default('git', ['ls-files', '-z'])).stdout;\r\n }\r\n finally {\r\n fixStdOutNullTermination();\r\n core.endGroup();\r\n }\r\n return output.split('\\u0000').filter(s => s.length > 0);\r\n}\r\nexports.listFiles = listFiles;\r\nfunction fixStdOutNullTermination() {\r\n // Previous command uses NULL as delimiters and output is printed to stdout.\r\n // We have to make sure next thing written to stdout will start on new line.\r\n // Otherwise things like ::set-output wouldn't work.\r\n core.info('');\r\n}\r\n","\"use strict\";\r\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}));\r\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n});\r\nvar __importStar = (this && this.__importStar) || function (mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n};\r\nvar __importDefault = (this && this.__importDefault) || function (mod) {\r\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\r\n};\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0;\r\nconst fs_1 = require(\"fs\");\r\nconst core = __importStar(require(\"@actions/core\"));\r\nconst github = __importStar(require(\"@actions/github\"));\r\nconst stream = __importStar(require(\"stream\"));\r\nconst util_1 = require(\"util\");\r\nconst got_1 = __importDefault(require(\"got\"));\r\nconst asyncStream = util_1.promisify(stream.pipeline);\r\nfunction getCheckRunContext() {\r\n if (github.context.eventName === 'workflow_run') {\r\n core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow');\r\n const event = github.context.payload;\r\n if (!event.workflow_run) {\r\n throw new Error(\"Event of type 'workflow_run' is missing 'workflow_run' field\");\r\n }\r\n return {\r\n sha: event.workflow_run.head_commit.id,\r\n runId: event.workflow_run.id\r\n };\r\n }\r\n const runId = github.context.runId;\r\n if (github.context.payload.pull_request) {\r\n core.info(`Action was triggered by ${github.context.eventName}: using SHA from head of source branch`);\r\n const pr = github.context.payload.pull_request;\r\n return { sha: pr.head.sha, runId };\r\n }\r\n return { sha: github.context.sha, runId };\r\n}\r\nexports.getCheckRunContext = getCheckRunContext;\r\nasync function downloadArtifact(octokit, artifactId, fileName, token) {\r\n core.startGroup(`Downloading artifact ${fileName}`);\r\n try {\r\n core.info(`Artifact ID: ${artifactId}`);\r\n const req = octokit.actions.downloadArtifact.endpoint({\r\n ...github.context.repo,\r\n artifact_id: artifactId,\r\n archive_format: 'zip'\r\n });\r\n const headers = {\r\n Authorization: `Bearer ${token}`\r\n };\r\n const resp = await got_1.default(req.url, {\r\n headers,\r\n followRedirect: false\r\n });\r\n core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`);\r\n if (resp.statusCode !== 302) {\r\n throw new Error('Fetch artifact URL failed: received unexpected status code');\r\n }\r\n const url = resp.headers.location;\r\n if (url === undefined) {\r\n const receivedHeaders = Object.keys(resp.headers);\r\n core.info(`Received headers: ${receivedHeaders.join(', ')}`);\r\n throw new Error('Location header was not found in API response');\r\n }\r\n if (typeof url !== 'string') {\r\n throw new Error(`Location header has unexpected value: ${url}`);\r\n }\r\n const downloadStream = got_1.default.stream(url, { headers });\r\n const fileWriterStream = fs_1.createWriteStream(fileName);\r\n core.info(`Downloading ${url}`);\r\n downloadStream.on('downloadProgress', ({ transferred }) => {\r\n core.info(`Progress: ${transferred} B`);\r\n });\r\n await asyncStream(downloadStream, fileWriterStream);\r\n }\r\n finally {\r\n core.endGroup();\r\n }\r\n}\r\nexports.downloadArtifact = downloadArtifact;\r\nasync function listFiles(octokit, sha) {\r\n core.startGroup('Fetching list of tracked files from GitHub');\r\n try {\r\n const commit = await octokit.git.getCommit({\r\n commit_sha: sha,\r\n ...github.context.repo\r\n });\r\n const files = await listGitTree(octokit, commit.data.tree.sha, '');\r\n return files;\r\n }\r\n finally {\r\n core.endGroup();\r\n }\r\n}\r\nexports.listFiles = listFiles;\r\nasync function listGitTree(octokit, sha, path) {\r\n const pathLog = path ? 
` at ${path}` : '';\r\n core.info(`Fetching tree ${sha}${pathLog}`);\r\n let truncated = false;\r\n let tree = await octokit.git.getTree({\r\n recursive: 'true',\r\n tree_sha: sha,\r\n ...github.context.repo\r\n });\r\n if (tree.data.truncated) {\r\n truncated = true;\r\n tree = await octokit.git.getTree({\r\n tree_sha: sha,\r\n ...github.context.repo\r\n });\r\n }\r\n const result = [];\r\n for (const tr of tree.data.tree) {\r\n const file = `${path}${tr.path}`;\r\n if (tr.type === 'blob') {\r\n result.push(file);\r\n }\r\n else if (tr.type === 'tree' && truncated) {\r\n const files = await listGitTree(octokit, tr.sha, `${file}/`);\r\n result.push(...files);\r\n }\r\n }\r\n return result;\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.formatTime = exports.ellipsis = exports.fixEol = exports.tableEscape = exports.table = exports.link = exports.Icon = exports.Align = void 0;\r\nvar Align;\r\n(function (Align) {\r\n Align[\"Left\"] = \":---\";\r\n Align[\"Center\"] = \":---:\";\r\n Align[\"Right\"] = \"---:\";\r\n Align[\"None\"] = \"---\";\r\n})(Align = exports.Align || (exports.Align = {}));\r\nexports.Icon = {\r\n skip: '✖️',\r\n success: '✔️',\r\n fail: '❌' // ':x:'\r\n};\r\nfunction link(title, address) {\r\n return `[${title}](${address})`;\r\n}\r\nexports.link = link;\r\nfunction table(headers, align, ...rows) {\r\n const headerRow = `|${headers.map(tableEscape).join('|')}|`;\r\n const alignRow = `|${align.join('|')}|`;\r\n const contentRows = rows.map(row => `|${row.map(tableEscape).join('|')}|`).join('\\n');\r\n return [headerRow, alignRow, contentRows].join('\\n');\r\n}\r\nexports.table = table;\r\nfunction tableEscape(content) {\r\n return content.toString().replace('|', '\\\\|');\r\n}\r\nexports.tableEscape = tableEscape;\r\nfunction fixEol(text) {\r\n var _a;\r\n return (_a = text === null || text === void 0 ? void 0 : text.replace(/\\r/g, '')) !== null && _a !== void 0 ? 
_a : '';\r\n}\r\nexports.fixEol = fixEol;\r\nfunction ellipsis(text, maxLength) {\r\n if (text.length <= maxLength) {\r\n return text;\r\n }\r\n return text.substr(0, maxLength - 3) + '...';\r\n}\r\nexports.ellipsis = ellipsis;\r\nfunction formatTime(ms) {\r\n if (ms > 1000) {\r\n return `${Math.round(ms / 1000)}s`;\r\n }\r\n return `${Math.round(ms)}ms`;\r\n}\r\nexports.formatTime = formatTime;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getExceptionSource = void 0;\r\nconst path_utils_1 = require(\"./path-utils\");\r\nfunction getExceptionSource(stackTrace, trackedFiles, getRelativePath) {\r\n const lines = stackTrace.split(/\\r?\\n/);\r\n const re = /\\((.*):(\\d+):\\d+\\)$/;\r\n for (const str of lines) {\r\n const match = str.match(re);\r\n if (match !== null) {\r\n const [_, fileStr, lineStr] = match;\r\n const filePath = path_utils_1.normalizeFilePath(fileStr);\r\n if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) {\r\n continue;\r\n }\r\n const path = getRelativePath(filePath);\r\n if (!path) {\r\n continue;\r\n }\r\n if (trackedFiles.includes(path)) {\r\n const line = parseInt(lineStr);\r\n return { path, line };\r\n }\r\n }\r\n }\r\n}\r\nexports.getExceptionSource = getExceptionSource;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getFirstNonEmptyLine = exports.parseIsoDate = exports.parseNetDuration = void 0;\r\nfunction parseNetDuration(str) {\r\n const durationRe = /^(\\d\\d):(\\d\\d):(\\d\\d(?:\\.\\d+)?)$/;\r\n const durationMatch = str.match(durationRe);\r\n if (durationMatch === null) {\r\n throw new Error(`Invalid format: \"${str}\" is not NET duration`);\r\n }\r\n const [_, hourStr, minStr, secStr] = durationMatch;\r\n return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000;\r\n}\r\nexports.parseNetDuration = parseNetDuration;\r\nfunction parseIsoDate(str) {\r\n const isoDateRe = /^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)$/;\r\n if (str === undefined || !isoDateRe.test(str)) {\r\n throw new Error(`Invalid format: \"${str}\" is not ISO date`);\r\n }\r\n return new Date(str);\r\n}\r\nexports.parseIsoDate = parseIsoDate;\r\nfunction getFirstNonEmptyLine(stackTrace) {\r\n const lines = stackTrace.split(/\\r?\\n/g);\r\n return lines.find(str => !/^\\s*$/.test(str));\r\n}\r\nexports.getFirstNonEmptyLine = getFirstNonEmptyLine;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.getBasePath = exports.normalizeFilePath = exports.normalizeDirPath = void 0;\r\nfunction normalizeDirPath(path, addTrailingSlash) {\r\n if (!path) {\r\n return path;\r\n }\r\n path = normalizeFilePath(path);\r\n if (addTrailingSlash && !path.endsWith('/')) {\r\n path += '/';\r\n }\r\n return path;\r\n}\r\nexports.normalizeDirPath = normalizeDirPath;\r\nfunction normalizeFilePath(path) {\r\n if (!path) {\r\n return path;\r\n }\r\n return path.trim().replace(/\\\\/g, '/');\r\n}\r\nexports.normalizeFilePath = normalizeFilePath;\r\nfunction getBasePath(path, trackedFiles) {\r\n if (trackedFiles.includes(path)) {\r\n return '';\r\n }\r\n let max = '';\r\n for (const file of trackedFiles) {\r\n if (path.endsWith(file) && file.length > max.length) {\r\n max = file;\r\n }\r\n }\r\n if (max === '') {\r\n return undefined;\r\n }\r\n const base = path.substr(0, path.length - max.length);\r\n return base;\r\n}\r\nexports.getBasePath = getBasePath;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.slug = void 0;\r\n// Returns HTML element id and href link usable as manual anchor links\r\n// This is needed because Github in check run summary doesn't automatically\r\n// create links out of headings as it normally does for other markdown content\r\nfunction slug(name) {\r\n const slugId = name\r\n .trim()\r\n .replace(/_/g, '')\r\n .replace(/[./\\\\]/g, '-')\r\n .replace(/[^\\w-]/g, '');\r\n const id = `user-content-${slugId}`;\r\n const link = `#${slugId}`;\r\n return { id, link };\r\n}\r\nexports.slug = slug;\r\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n 
HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = 
this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // 
If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n 
if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;\r\nconst NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');\r\nconst MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);\r\nconst MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);\r\nconst SUPPORTED_MAJOR_VERSION = 10;\r\nconst SUPPORTED_MINOR_VERSION = 10;\r\nconst IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;\r\nconst IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;\r\n/**\r\n * IS `true` for Node.js 10.10 and greater.\r\n */\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.scandirSync = exports.scandir = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction scandir(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.scandir = scandir;\r\nfunction scandirSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.scandirSync = scandirSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst rpl = require(\"run-parallel\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings, callback) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings, callback);\r\n }\r\n return readdir(directory, settings, callback);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings, callback) {\r\n settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {\r\n if (readdirError !== null) {\r\n return 
callFailureCallback(callback, readdirError);\r\n }\r\n const entries = dirents.map((dirent) => ({\r\n dirent,\r\n name: dirent.name,\r\n path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n }));\r\n if (!settings.followSymbolicLinks) {\r\n return callSuccessCallback(callback, entries);\r\n }\r\n const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));\r\n rpl(tasks, (rplError, rplEntries) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n callSuccessCallback(callback, rplEntries);\r\n });\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction makeRplTaskEntry(entry, settings) {\r\n return (done) => {\r\n if (!entry.dirent.isSymbolicLink()) {\r\n return done(null, entry);\r\n }\r\n settings.fs.stat(entry.path, (statError, stats) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return done(statError);\r\n }\r\n return done(null, entry);\r\n }\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n return done(null, entry);\r\n });\r\n };\r\n}\r\nfunction readdir(directory, settings, callback) {\r\n settings.fs.readdir(directory, (readdirError, names) => {\r\n if (readdirError !== null) {\r\n return callFailureCallback(callback, readdirError);\r\n }\r\n const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator));\r\n const tasks = filepaths.map((filepath) => {\r\n return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);\r\n });\r\n rpl(tasks, (rplError, results) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n const entries = [];\r\n names.forEach((name, index) => {\r\n const stats = results[index];\r\n const entry = {\r\n name,\r\n path: filepaths[index],\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n entries.push(entry);\r\n });\r\n callSuccessCallback(callback, entries);\r\n });\r\n });\r\n}\r\nexports.readdir = readdir;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = void 0;\r\nfunction joinPathSegments(a, b, separator) {\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings);\r\n }\r\n return readdir(directory, settings);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings) {\r\n const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });\r\n return dirents.map((dirent) => {\r\n const entry = {\r\n dirent,\r\n name: dirent.name,\r\n path: 
common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n };\r\n if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {\r\n try {\r\n const stats = settings.fs.statSync(entry.path);\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n }\r\n catch (error) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n throw error;\r\n }\r\n }\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction readdir(directory, settings) {\r\n const names = settings.fs.readdirSync(directory);\r\n return names.map((name) => {\r\n const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);\r\n const stats = fsStat.statSync(entryPath, settings.fsStatSettings);\r\n const entry = {\r\n name,\r\n path: entryPath,\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdir = readdir;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n this.fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this.followSymbolicLinks,\r\n fs: this.fs,\r\n throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? 
option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.fs = void 0;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.statSync = exports.stat = exports.Settings = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction stat(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.stat = stat;\r\nfunction statSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.statSync = statSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings, callback) {\r\n settings.fs.lstat(path, (lstatError, lstat) => {\r\n if (lstatError !== null) {\r\n return callFailureCallback(callback, lstatError);\r\n }\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n settings.fs.stat(path, (statError, stat) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return callFailureCallback(callback, statError);\r\n }\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n callSuccessCallback(callback, stat);\r\n });\r\n });\r\n}\r\nexports.read = read;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction 
callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings) {\r\n const lstat = settings.fs.lstatSync(path);\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return lstat;\r\n }\r\n try {\r\n const stat = settings.fs.statSync(path);\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n return stat;\r\n }\r\n catch (error) {\r\n if (!settings.throwErrorOnBrokenSymbolicLink) {\r\n return lstat;\r\n }\r\n throw error;\r\n }\r\n}\r\nexports.read = read;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction walk(directory, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);\r\n }\r\n new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);\r\n}\r\nexports.walk = walk;\r\nfunction walkSync(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new sync_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkSync = walkSync;\r\nfunction walkStream(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new stream_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkStream = walkStream;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async_1 = require(\"../readers/async\");\r\nclass AsyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._storage = new Set();\r\n }\r\n read(callback) {\r\n this._reader.onError((error) => {\r\n callFailureCallback(callback, error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._storage.add(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n callSuccessCallback(callback, [...this._storage]);\r\n });\r\n this._reader.read();\r\n }\r\n}\r\nexports.default = AsyncProvider;\r\nfunction 
callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, entries) {\r\n callback(null, entries);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst async_1 = require(\"../readers/async\");\r\nclass StreamProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._stream = new stream_1.Readable({\r\n objectMode: true,\r\n read: () => { },\r\n destroy: () => {\r\n if (!this._reader.isDestroyed) {\r\n this._reader.destroy();\r\n }\r\n }\r\n });\r\n }\r\n read() {\r\n this._reader.onError((error) => {\r\n this._stream.emit('error', error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._stream.push(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n this._stream.push(null);\r\n });\r\n this._reader.read();\r\n return this._stream;\r\n }\r\n}\r\nexports.default = StreamProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nclass SyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new sync_1.default(this._root, this._settings);\r\n }\r\n read() {\r\n return this._reader.read();\r\n }\r\n}\r\nexports.default = SyncProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst events_1 = require(\"events\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst fastq = require(\"fastq\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass AsyncReader extends reader_1.default {\r\n constructor(_root, _settings) {\r\n super(_root, _settings);\r\n this._settings = _settings;\r\n this._scandir = fsScandir.scandir;\r\n this._emitter = new events_1.EventEmitter();\r\n this._queue = fastq(this._worker.bind(this), this._settings.concurrency);\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n this._queue.drain = () => {\r\n if (!this._isFatalError) {\r\n this._emitter.emit('end');\r\n }\r\n };\r\n }\r\n read() {\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n setImmediate(() => {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n });\r\n return this._emitter;\r\n }\r\n get isDestroyed() {\r\n return this._isDestroyed;\r\n }\r\n destroy() {\r\n if (this._isDestroyed) {\r\n throw new Error('The reader is already destroyed');\r\n }\r\n this._isDestroyed = true;\r\n this._queue.killAndDrain();\r\n }\r\n onEntry(callback) {\r\n this._emitter.on('entry', callback);\r\n }\r\n onError(callback) {\r\n this._emitter.once('error', callback);\r\n }\r\n onEnd(callback) {\r\n this._emitter.once('end', callback);\r\n }\r\n _pushToQueue(directory, base) {\r\n const queueItem = { directory, base };\r\n this._queue.push(queueItem, (error) => {\r\n if (error !== null) {\r\n this._handleError(error);\r\n }\r\n });\r\n }\r\n _worker(item, done) {\r\n this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {\r\n if (error !== null) {\r\n return done(error, undefined);\r\n }\r\n for (const entry of entries) {\r\n this._handleEntry(entry, item.base);\r\n }\r\n done(null, undefined);\r\n });\r\n }\r\n _handleError(error) {\r\n if (this._isDestroyed || !common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n 
this._isFatalError = true;\r\n this._isDestroyed = true;\r\n this._emitter.emit('error', error);\r\n }\r\n _handleEntry(entry, base) {\r\n if (this._isDestroyed || this._isFatalError) {\r\n return;\r\n }\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._emitEntry(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _emitEntry(entry) {\r\n this._emitter.emit('entry', entry);\r\n }\r\n}\r\nexports.default = AsyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;\r\nfunction isFatalError(settings, error) {\r\n if (settings.errorFilter === null) {\r\n return true;\r\n }\r\n return !settings.errorFilter(error);\r\n}\r\nexports.isFatalError = isFatalError;\r\nfunction isAppliedFilter(filter, value) {\r\n return filter === null || filter(value);\r\n}\r\nexports.isAppliedFilter = isAppliedFilter;\r\nfunction replacePathSegmentSeparator(filepath, separator) {\r\n return filepath.split(/[/\\\\]/).join(separator);\r\n}\r\nexports.replacePathSegmentSeparator = replacePathSegmentSeparator;\r\nfunction joinPathSegments(a, b, separator) {\r\n if (a === '') {\r\n return b;\r\n }\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst common = require(\"./common\");\r\nclass Reader {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass SyncReader extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._scandir = fsScandir.scandirSync;\r\n this._storage = new Set();\r\n this._queue = new Set();\r\n }\r\n read() {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n this._handleQueue();\r\n return [...this._storage];\r\n }\r\n _pushToQueue(directory, base) {\r\n this._queue.add({ directory, base });\r\n }\r\n _handleQueue() {\r\n for (const item of this._queue.values()) {\r\n this._handleDirectory(item.directory, item.base);\r\n }\r\n }\r\n _handleDirectory(directory, base) {\r\n try {\r\n const entries = this._scandir(directory, this._settings.fsScandirSettings);\r\n for (const entry of entries) {\r\n this._handleEntry(entry, base);\r\n }\r\n }\r\n catch (error) {\r\n this._handleError(error);\r\n }\r\n }\r\n _handleError(error) {\r\n if (!common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n throw error;\r\n }\r\n _handleEntry(entry, base) {\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, 
this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._pushToStorage(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _pushToStorage(entry) {\r\n this._storage.add(entry);\r\n }\r\n}\r\nexports.default = SyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.basePath = this._getValue(this._options.basePath, undefined);\r\n this.concurrency = this._getValue(this._options.concurrency, Infinity);\r\n this.deepFilter = this._getValue(this._options.deepFilter, null);\r\n this.entryFilter = this._getValue(this._options.entryFilter, null);\r\n this.errorFilter = this._getValue(this._options.errorFilter, null);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.fsScandirSettings = new fsScandir.Settings({\r\n followSymbolicLinks: this._options.followSymbolicLinks,\r\n fs: this._options.fs,\r\n pathSegmentSeparator: this._options.pathSegmentSeparator,\r\n stats: this._options.stats,\r\n throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? 
\"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.2.1\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are 
set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, [\"authStrategy\"]);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.9\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.5.7\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.6.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.paginateRest = paginateRest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listJobsForWorkflowRun: [\"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n 
checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n 
listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }]\n },\n codeScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForOrg: [\"GET 
/orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET 
/orgs/{org}/blocks\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n 
previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: 
{\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET 
/repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n downloadArchive: [\"GET /repos/{owner}/{repo}/{archive_format}/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: 
[\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\", {\n mediaType: {\n previews: [\"black-panther\"]\n }\n }],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", 
{\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"4.2.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n/**\n * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary\n * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is\n * done, we will remove the registerEndpoints methods and return the methods\n * directly as with the other plugins. At that point we will also remove the\n * legacy workarounds and deprecations.\n *\n * See the plan at\n * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1\n */\n\nfunction restEndpointMethods(octokit) {\n return endpointsToMethods(octokit, Endpoints);\n}\nrestEndpointMethods.VERSION = VERSION;\n\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.10\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","\"use strict\";\n/// \n/// \n/// \nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst typedArrayTypeNames = [\n 'Int8Array',\n 'Uint8Array',\n 'Uint8ClampedArray',\n 'Int16Array',\n 'Uint16Array',\n 'Int32Array',\n 'Uint32Array',\n 'Float32Array',\n 'Float64Array',\n 'BigInt64Array',\n 'BigUint64Array'\n];\nfunction isTypedArrayName(name) {\n return typedArrayTypeNames.includes(name);\n}\nconst objectTypeNames = [\n 'Function',\n 'Generator',\n 'AsyncGenerator',\n 'GeneratorFunction',\n 'AsyncGeneratorFunction',\n 'AsyncFunction',\n 'Observable',\n 'Array',\n 'Buffer',\n 'Object',\n 'RegExp',\n 'Date',\n 'Error',\n 'Map',\n 'Set',\n 'WeakMap',\n 'WeakSet',\n 'ArrayBuffer',\n 'SharedArrayBuffer',\n 'DataView',\n 'Promise',\n 'URL',\n 'HTMLElement',\n ...typedArrayTypeNames\n];\nfunction isObjectTypeName(name) {\n return objectTypeNames.includes(name);\n}\nconst primitiveTypeNames = [\n 'null',\n 'undefined',\n 'string',\n 'number',\n 'bigint',\n 'boolean',\n 'symbol'\n];\nfunction isPrimitiveTypeName(name) {\n return primitiveTypeNames.includes(name);\n}\n// eslint-disable-next-line @typescript-eslint/ban-types\nfunction isOfType(type) {\n return (value) => typeof value === type;\n}\nconst { toString } = Object.prototype;\nconst getObjectType = (value) => {\n const objectTypeName = toString.call(value).slice(8, -1);\n if (/HTML\\w+Element/.test(objectTypeName) && is.domElement(value)) {\n return 'HTMLElement';\n }\n if (isObjectTypeName(objectTypeName)) {\n return objectTypeName;\n }\n return undefined;\n};\nconst isObjectOfType = (type) => (value) => getObjectType(value) === type;\nfunction is(value) {\n if (value === null) {\n return 'null';\n }\n switch (typeof value) {\n case 'undefined':\n return 'undefined';\n case 'string':\n return 'string';\n case 'number':\n return 'number';\n case 'boolean':\n return 'boolean';\n case 'function':\n return 'Function';\n case 'bigint':\n return 'bigint';\n case 'symbol':\n return 'symbol';\n default:\n }\n if (is.observable(value)) {\n return 'Observable';\n }\n if (is.array(value)) {\n return 'Array';\n }\n if (is.buffer(value)) {\n return 'Buffer';\n }\n const tagType = getObjectType(value);\n if (tagType) {\n return tagType;\n }\n if (value instanceof String || value instanceof Boolean || value instanceof Number) {\n throw new TypeError('Please don\\'t use object wrappers for primitive types');\n }\n return 'Object';\n}\nis.undefined = isOfType('undefined');\nis.string = isOfType('string');\nconst isNumberType = isOfType('number');\nis.number = (value) => isNumberType(value) && !is.nan(value);\nis.bigint = isOfType('bigint');\n// 
eslint-disable-next-line @typescript-eslint/ban-types\nis.function_ = isOfType('function');\nis.null_ = (value) => value === null;\nis.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');\nis.boolean = (value) => value === true || value === false;\nis.symbol = isOfType('symbol');\nis.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value));\nis.array = (value, assertion) => {\n if (!Array.isArray(value)) {\n return false;\n }\n if (!is.function_(assertion)) {\n return true;\n }\n return value.every(assertion);\n};\nis.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; };\nis.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);\nis.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value));\nis.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); };\nis.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); };\nis.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);\nis.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw);\nis.nativePromise = (value) => isObjectOfType('Promise')(value);\nconst hasPromiseAPI = (value) => {\n var _a, _b;\n return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.then) &&\n is.function_((_b = value) === null || _b === void 0 ? 
void 0 : _b.catch);\n};\nis.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);\nis.generatorFunction = isObjectOfType('GeneratorFunction');\nis.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction';\nis.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction';\n// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types\nis.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');\nis.regExp = isObjectOfType('RegExp');\nis.date = isObjectOfType('Date');\nis.error = isObjectOfType('Error');\nis.map = (value) => isObjectOfType('Map')(value);\nis.set = (value) => isObjectOfType('Set')(value);\nis.weakMap = (value) => isObjectOfType('WeakMap')(value);\nis.weakSet = (value) => isObjectOfType('WeakSet')(value);\nis.int8Array = isObjectOfType('Int8Array');\nis.uint8Array = isObjectOfType('Uint8Array');\nis.uint8ClampedArray = isObjectOfType('Uint8ClampedArray');\nis.int16Array = isObjectOfType('Int16Array');\nis.uint16Array = isObjectOfType('Uint16Array');\nis.int32Array = isObjectOfType('Int32Array');\nis.uint32Array = isObjectOfType('Uint32Array');\nis.float32Array = isObjectOfType('Float32Array');\nis.float64Array = isObjectOfType('Float64Array');\nis.bigInt64Array = isObjectOfType('BigInt64Array');\nis.bigUint64Array = isObjectOfType('BigUint64Array');\nis.arrayBuffer = isObjectOfType('ArrayBuffer');\nis.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer');\nis.dataView = isObjectOfType('DataView');\nis.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype;\nis.urlInstance = (value) => isObjectOfType('URL')(value);\nis.urlString = (value) => {\n if (!is.string(value)) {\n return false;\n }\n try {\n new URL(value); // eslint-disable-line no-new\n return true;\n }\n catch (_a) {\n return false;\n }\n};\n// TODO: Use the `not` operator with a type guard here when it's available.\n// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`\nis.truthy = (value) => Boolean(value);\n// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`\nis.falsy = (value) => !value;\nis.nan = (value) => Number.isNaN(value);\nis.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value);\nis.integer = (value) => Number.isInteger(value);\nis.safeInteger = (value) => Number.isSafeInteger(value);\nis.plainObject = (value) => {\n // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js\n if (toString.call(value) !== '[object Object]') {\n return false;\n }\n const prototype = Object.getPrototypeOf(value);\n return prototype === null || prototype === Object.getPrototypeOf({});\n};\nis.typedArray = (value) => isTypedArrayName(getObjectType(value));\nconst isValidLength = (value) => is.safeInteger(value) && value >= 0;\nis.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);\nis.inRange = (value, range) => {\n if (is.number(range)) {\n return value >= Math.min(0, range) && value <= Math.max(range, 0);\n }\n if (is.array(range) && range.length === 2) {\n return value >= Math.min(...range) && value <= Math.max(...range);\n }\n throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);\n};\nconst NODE_TYPE_ELEMENT = 1;\nconst DOM_PROPERTIES_TO_CHECK = [\n 'innerHTML',\n 'ownerDocument',\n 'style',\n 'attributes',\n 'nodeValue'\n];\nis.domElement = (value) => {\n 
return is.object(value) &&\n value.nodeType === NODE_TYPE_ELEMENT &&\n is.string(value.nodeName) &&\n !is.plainObject(value) &&\n DOM_PROPERTIES_TO_CHECK.every(property => property in value);\n};\nis.observable = (value) => {\n var _a, _b, _c, _d;\n if (!value) {\n return false;\n }\n // eslint-disable-next-line no-use-extend-native/no-use-extend-native\n if (value === ((_b = (_a = value)[Symbol.observable]) === null || _b === void 0 ? void 0 : _b.call(_a))) {\n return true;\n }\n if (value === ((_d = (_c = value)['@@observable']) === null || _d === void 0 ? void 0 : _d.call(_c))) {\n return true;\n }\n return false;\n};\nis.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value);\nis.infinite = (value) => value === Infinity || value === -Infinity;\nconst isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder;\nis.evenInteger = isAbsoluteMod2(0);\nis.oddInteger = isAbsoluteMod2(1);\nis.emptyArray = (value) => is.array(value) && value.length === 0;\nis.nonEmptyArray = (value) => is.array(value) && value.length > 0;\nis.emptyString = (value) => is.string(value) && value.length === 0;\n// TODO: Use `not ''` when the `not` operator is available.\nis.nonEmptyString = (value) => is.string(value) && value.length > 0;\nconst isWhiteSpaceString = (value) => is.string(value) && !/\\S/.test(value);\nis.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);\nis.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;\n// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:\n// - https://github.com/Microsoft/TypeScript/pull/29317\nis.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;\nis.emptySet = (value) => is.set(value) && value.size === 0;\nis.nonEmptySet = (value) => is.set(value) && value.size > 0;\nis.emptyMap = (value) => is.map(value) && value.size === 0;\nis.nonEmptyMap = (value) => is.map(value) && value.size > 0;\nconst predicateOnArray = (method, predicate, values) => {\n if (!is.function_(predicate)) {\n throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);\n }\n if (values.length === 0) {\n throw new TypeError('Invalid number of values');\n }\n return method.call(values, predicate);\n};\nis.any = (predicate, ...values) => {\n const predicates = is.array(predicate) ? 
predicate : [predicate];\n return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));\n};\nis.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);\nconst assertType = (condition, description, value) => {\n if (!condition) {\n throw new TypeError(`Expected value which is \\`${description}\\`, received value of type \\`${is(value)}\\`.`);\n }\n};\nexports.assert = {\n // Unknowns.\n undefined: (value) => assertType(is.undefined(value), 'undefined', value),\n string: (value) => assertType(is.string(value), 'string', value),\n number: (value) => assertType(is.number(value), 'number', value),\n bigint: (value) => assertType(is.bigint(value), 'bigint', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n function_: (value) => assertType(is.function_(value), 'Function', value),\n null_: (value) => assertType(is.null_(value), 'null', value),\n class_: (value) => assertType(is.class_(value), \"Class\" /* class_ */, value),\n boolean: (value) => assertType(is.boolean(value), 'boolean', value),\n symbol: (value) => assertType(is.symbol(value), 'symbol', value),\n numericString: (value) => assertType(is.numericString(value), \"string with a number\" /* numericString */, value),\n array: (value, assertion) => {\n const assert = assertType;\n assert(is.array(value), 'Array', value);\n if (assertion) {\n value.forEach(assertion);\n }\n },\n buffer: (value) => assertType(is.buffer(value), 'Buffer', value),\n nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), \"null or undefined\" /* nullOrUndefined */, value),\n object: (value) => assertType(is.object(value), 'Object', value),\n iterable: (value) => assertType(is.iterable(value), \"Iterable\" /* iterable */, value),\n asyncIterable: (value) => assertType(is.asyncIterable(value), \"AsyncIterable\" /* asyncIterable */, value),\n generator: (value) => assertType(is.generator(value), 'Generator', value),\n asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value),\n nativePromise: (value) => assertType(is.nativePromise(value), \"native Promise\" /* nativePromise */, value),\n promise: (value) => assertType(is.promise(value), 'Promise', value),\n generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value),\n asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value),\n regExp: (value) => assertType(is.regExp(value), 'RegExp', value),\n date: (value) => assertType(is.date(value), 'Date', value),\n error: (value) => assertType(is.error(value), 'Error', value),\n map: (value) => assertType(is.map(value), 'Map', value),\n set: (value) => assertType(is.set(value), 'Set', value),\n weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value),\n weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value),\n int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value),\n uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value),\n uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value),\n int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', 
value),\n uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value),\n int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value),\n uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value),\n float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value),\n float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value),\n bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value),\n bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value),\n arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value),\n sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value),\n dataView: (value) => assertType(is.dataView(value), 'DataView', value),\n urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value),\n urlString: (value) => assertType(is.urlString(value), \"string with a URL\" /* urlString */, value),\n truthy: (value) => assertType(is.truthy(value), \"truthy\" /* truthy */, value),\n falsy: (value) => assertType(is.falsy(value), \"falsy\" /* falsy */, value),\n nan: (value) => assertType(is.nan(value), \"NaN\" /* nan */, value),\n primitive: (value) => assertType(is.primitive(value), \"primitive\" /* primitive */, value),\n integer: (value) => assertType(is.integer(value), \"integer\" /* integer */, value),\n safeInteger: (value) => assertType(is.safeInteger(value), \"integer\" /* safeInteger */, value),\n plainObject: (value) => assertType(is.plainObject(value), \"plain object\" /* plainObject */, value),\n typedArray: (value) => assertType(is.typedArray(value), \"TypedArray\" /* typedArray */, value),\n arrayLike: (value) => assertType(is.arrayLike(value), \"array-like\" /* arrayLike */, value),\n domElement: (value) => assertType(is.domElement(value), \"HTMLElement\" /* domElement */, value),\n observable: (value) => assertType(is.observable(value), 'Observable', value),\n nodeStream: (value) => assertType(is.nodeStream(value), \"Node.js Stream\" /* nodeStream */, value),\n infinite: (value) => assertType(is.infinite(value), \"infinite number\" /* infinite */, value),\n emptyArray: (value) => assertType(is.emptyArray(value), \"empty array\" /* emptyArray */, value),\n nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), \"non-empty array\" /* nonEmptyArray */, value),\n emptyString: (value) => assertType(is.emptyString(value), \"empty string\" /* emptyString */, value),\n nonEmptyString: (value) => assertType(is.nonEmptyString(value), \"non-empty string\" /* nonEmptyString */, value),\n emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), \"empty string or whitespace\" /* emptyStringOrWhitespace */, value),\n emptyObject: (value) => assertType(is.emptyObject(value), \"empty object\" /* emptyObject */, value),\n nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), \"non-empty object\" /* nonEmptyObject */, value),\n emptySet: (value) => assertType(is.emptySet(value), \"empty set\" /* emptySet */, value),\n nonEmptySet: (value) => assertType(is.nonEmptySet(value), \"non-empty set\" /* nonEmptySet */, value),\n emptyMap: (value) => assertType(is.emptyMap(value), \"empty map\" /* emptyMap */, value),\n nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), \"non-empty map\" /* nonEmptyMap */, value),\n // Numbers.\n evenInteger: (value) => assertType(is.evenInteger(value), \"even integer\" /* evenInteger */, 
value),\n oddInteger: (value) => assertType(is.oddInteger(value), \"odd integer\" /* oddInteger */, value),\n // Two arguments.\n directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), \"T\" /* directInstanceOf */, instance),\n inRange: (value, range) => assertType(is.inRange(value, range), \"in range\" /* inRange */, value),\n // Variadic functions.\n any: (predicate, ...values) => assertType(is.any(predicate, ...values), \"predicate returns truthy for any value\" /* any */, values),\n all: (predicate, ...values) => assertType(is.all(predicate, ...values), \"predicate returns truthy for all values\" /* all */, values)\n};\n// Some few keywords are reserved, but we'll populate them for Node.js users\n// See https://github.com/Microsoft/TypeScript/issues/2536\nObject.defineProperties(is, {\n class: {\n value: is.class_\n },\n function: {\n value: is.function_\n },\n null: {\n value: is.null_\n }\n});\nObject.defineProperties(exports.assert, {\n class: {\n value: exports.assert.class_\n },\n function: {\n value: exports.assert.function_\n },\n null: {\n value: exports.assert.null_\n }\n});\nexports.default = is;\n// For CommonJS default export support\nmodule.exports = is;\nmodule.exports.default = is;\nmodule.exports.assert = exports.assert;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst defer_to_connect_1 = require(\"defer-to-connect\");\nconst nodejsMajorVersion = Number(process.versions.node.split('.')[0]);\nconst timer = (request) => {\n const timings = {\n start: Date.now(),\n socket: undefined,\n lookup: undefined,\n connect: undefined,\n secureConnect: undefined,\n upload: undefined,\n response: undefined,\n end: undefined,\n error: undefined,\n abort: undefined,\n phases: {\n wait: undefined,\n dns: undefined,\n tcp: undefined,\n tls: undefined,\n request: undefined,\n firstByte: undefined,\n download: undefined,\n total: undefined\n }\n };\n request.timings = timings;\n const handleError = (origin) => {\n const emit = origin.emit.bind(origin);\n origin.emit = (event, ...args) => {\n // Catches the `error` event\n if (event === 'error') {\n timings.error = Date.now();\n timings.phases.total = timings.error - timings.start;\n origin.emit = emit;\n }\n // Saves the original behavior\n return emit(event, ...args);\n };\n };\n handleError(request);\n request.prependOnceListener('abort', () => {\n timings.abort = Date.now();\n // Let the `end` response event be responsible for setting the total phase,\n // unless the Node.js major version is >= 13.\n if (!timings.response || nodejsMajorVersion >= 13) {\n timings.phases.total = Date.now() - timings.start;\n }\n });\n const onSocket = (socket) => {\n timings.socket = Date.now();\n timings.phases.wait = timings.socket - timings.start;\n const lookupListener = () => {\n timings.lookup = Date.now();\n timings.phases.dns = timings.lookup - timings.socket;\n };\n socket.prependOnceListener('lookup', lookupListener);\n defer_to_connect_1.default(socket, {\n connect: () => {\n timings.connect = Date.now();\n if (timings.lookup === undefined) {\n socket.removeListener('lookup', lookupListener);\n timings.lookup = timings.connect;\n timings.phases.dns = timings.lookup - timings.socket;\n }\n timings.phases.tcp = timings.connect - timings.lookup;\n // This callback is called before flushing any data,\n // so we don't need to set `timings.phases.request` here.\n },\n secureConnect: () => {\n timings.secureConnect = Date.now();\n timings.phases.tls = timings.secureConnect 
- timings.connect;\n }\n });\n };\n if (request.socket) {\n onSocket(request.socket);\n }\n else {\n request.prependOnceListener('socket', onSocket);\n }\n const onUpload = () => {\n var _a;\n timings.upload = Date.now();\n timings.phases.request = timings.upload - (_a = timings.secureConnect, (_a !== null && _a !== void 0 ? _a : timings.connect));\n };\n const writableFinished = () => {\n if (typeof request.writableFinished === 'boolean') {\n return request.writableFinished;\n }\n // Node.js doesn't have `request.writableFinished` property\n return request.finished && request.outputSize === 0 && (!request.socket || request.socket.writableLength === 0);\n };\n if (writableFinished()) {\n onUpload();\n }\n else {\n request.prependOnceListener('finish', onUpload);\n }\n request.prependOnceListener('response', (response) => {\n timings.response = Date.now();\n timings.phases.firstByte = timings.response - timings.upload;\n response.timings = timings;\n handleError(response);\n response.prependOnceListener('end', () => {\n timings.end = Date.now();\n timings.phases.download = timings.end - timings.response;\n timings.phases.total = timings.end - timings.start;\n });\n });\n return timings;\n};\nexports.default = timer;\n// For CommonJS default export support\nmodule.exports = timer;\nmodule.exports.default = timer;\n","var Utils = require(\"./util\");\r\nvar fs = Utils.FileSystem.require(),\r\n\tpth = require(\"path\");\r\n\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nvar ZipEntry = require(\"./zipEntry\"),\r\n\tZipFile = require(\"./zipFile\");\r\n\r\nvar isWin = /^win/.test(process.platform);\r\n\r\nfunction canonical(p) {\r\n var safeSuffix = pth.normalize(p).replace(/^(\\.\\.(\\/|\\\\|$))+/, '');\r\n return pth.join('./', safeSuffix);\r\n}\r\n\r\nmodule.exports = function (/**String*/input) {\r\n\tvar _zip = undefined,\r\n\t\t_filename = \"\";\r\n\r\n\tif (input && typeof input === \"string\") { // load zip file\r\n\t\tif (fs.existsSync(input)) {\r\n\t\t\t_filename = input;\r\n\t\t\t_zip = new ZipFile(input, Utils.Constants.FILE);\r\n\t\t} else {\r\n\t\t\tthrow new Error(Utils.Errors.INVALID_FILENAME);\r\n\t\t}\r\n\t} else if (input && Buffer.isBuffer(input)) { // load buffer\r\n\t\t_zip = new ZipFile(input, Utils.Constants.BUFFER);\r\n\t} else { // create new zip file\r\n\t\t_zip = new ZipFile(null, Utils.Constants.NONE);\r\n\t}\r\n\r\n\tfunction sanitize(prefix, name) {\r\n\t\tprefix = pth.resolve(pth.normalize(prefix));\r\n\t\tvar parts = name.split('/');\r\n\t\tfor (var i = 0, l = parts.length; i < l; i++) {\r\n\t\t\tvar path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));\r\n\t\t\tif (path.indexOf(prefix) === 0) {\r\n\t\t\t\treturn path;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn pth.normalize(pth.join(prefix, pth.basename(name)));\r\n\t}\r\n\r\n\tfunction getEntry(/**Object*/entry) {\r\n\t\tif (entry && _zip) {\r\n\t\t\tvar item;\r\n\t\t\t// If entry was given as a file name\r\n\t\t\tif (typeof entry === \"string\")\r\n\t\t\t\titem = _zip.getEntry(entry);\r\n\t\t\t// if entry was given as a ZipEntry object\r\n\t\t\tif (typeof entry === \"object\" && typeof entry.entryName !== \"undefined\" && typeof entry.header !== \"undefined\")\r\n\t\t\t\titem = _zip.getEntry(entry.entryName);\r\n\r\n\t\t\tif (item) {\r\n\t\t\t\treturn item;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn null;\r\n\t}\r\n\r\n function fixPath(zipPath){\r\n // convert windows file separators\r\n zipPath = zipPath.split(\"\\\\\").join(\"/\");\r\n // add separator if it wasnt given\r\n if 
(zipPath.charAt(zipPath.length - 1) !== \"/\") {\r\n zipPath += \"/\";\r\n } \r\n return zipPath;\r\n }\r\n\r\n\treturn {\r\n\t\t/**\r\n\t\t * Extracts the given entry from the archive and returns the content as a Buffer object\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t *\r\n\t\t * @return Buffer or Null in case of error\r\n\t\t */\r\n\t\treadFile: function (/**Object*/entry, /*String, Buffer*/pass) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\treturn item && item.getData(pass) || null;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous readFile\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param callback\r\n\t\t *\r\n\t\t * @return Buffer or Null in case of error\r\n\t\t */\r\n\t\treadFileAsync: function (/**Object*/entry, /**Function*/callback) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.getDataAsync(callback);\r\n\t\t\t} else {\r\n\t\t\t\tcallback(null, \"getEntry failed for:\" + entry)\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the given entry from the archive and returns the content as plain text in the given encoding\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param encoding Optional. If no encoding is specified utf8 is used\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\treadAsText: function (/**Object*/entry, /**String=*/encoding) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\tvar data = item.getData();\r\n\t\t\t\tif (data && data.length) {\r\n\t\t\t\t\treturn data.toString(encoding || \"utf8\");\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\treturn \"\";\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous readAsText\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param callback\r\n\t\t * @param encoding Optional. If no encoding is specified utf8 is used\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\treadAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.getDataAsync(function (data, err) {\r\n\t\t\t\t\tif (err) {\r\n\t\t\t\t\t\tcallback(data, err);\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (data && data.length) {\r\n\t\t\t\t\t\tcallback(data.toString(encoding || \"utf8\"));\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tcallback(\"\");\r\n\t\t\t\t\t}\r\n\t\t\t\t})\r\n\t\t\t} else {\r\n\t\t\t\tcallback(\"\");\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t */\r\n\t\tdeleteFile: function (/**Object*/entry) { // @TODO: test deleteFile\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\t_zip.deleteEntry(item.entryName);\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a comment to the zip. The zip must be rewritten after adding the comment.\r\n\t\t *\r\n\t\t * @param comment\r\n\t\t */\r\n\t\taddZipComment: function (/**String*/comment) { // @TODO: test addZipComment\r\n\t\t\t_zip.comment = comment;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the zip comment\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\tgetZipComment: function () {\r\n\t\t\treturn _zip.comment || '';\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a comment to a specified zipEntry. 
The zip must be rewritten after adding the comment\r\n\t\t * The comment cannot exceed 65535 characters in length\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @param comment\r\n\t\t */\r\n\t\taddZipEntryComment: function (/**Object*/entry, /**String*/comment) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.comment = comment;\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the comment of the specified entry\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @return String\r\n\t\t */\r\n\t\tgetZipEntryComment: function (/**Object*/entry) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\treturn item.comment || '';\r\n\t\t\t}\r\n\t\t\treturn ''\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @param content\r\n\t\t */\r\n\t\tupdateFile: function (/**Object*/entry, /**Buffer*/content) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.setData(content);\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a file from the disk to the archive\r\n\t\t *\r\n\t\t * @param localPath File to add to zip\r\n\t\t * @param zipPath Optional path inside the zip\r\n\t\t * @param zipName Optional name for the file\r\n\t\t */\r\n\t\taddLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) {\r\n\t\t\tif (fs.existsSync(localPath)) {\r\n\t\t\t\t// fix ZipPath\r\n\t\t\t\tzipPath = (zipPath) ? fixPath(zipPath) : \"\";\r\n\r\n\t\t\t\t// p - local file name\r\n\t\t\t\tvar p = localPath.split(\"\\\\\").join(\"/\").split(\"/\").pop();\r\n\r\n\t\t\t\t// add file name into zippath\r\n\t\t\t\tzipPath += (zipName) ? zipName : p;\r\n\r\n\t\t\t\t// read file attributes \r\n\t\t\t\tconst _attr = fs.statSync(localPath);\r\n\r\n\t\t\t\t// add file into zip file\r\n\t\t\t\tthis.addFile(zipPath, fs.readFileSync(localPath), comment, _attr)\r\n\t\t\t} else {\r\n\t\t\t\tthrow new Error(Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a local directory and all its nested files and directories to the archive\r\n\t\t *\r\n\t\t * @param localPath\r\n\t\t * @param zipPath optional path inside zip\r\n\t\t * @param filter optional RegExp or Function if files match will\r\n\t\t * be included.\r\n\t\t */\r\n addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) {\r\n // Prepare filter\r\n if (filter instanceof RegExp) { // if filter is RegExp wrap it \r\n filter = (function (rx){\r\n return function (filename) {\r\n return rx.test(filename);\r\n }\r\n })(filter);\r\n } else if ('function' !== typeof filter) { // if filter is not function we will replace it\r\n filter = function () {\r\n return true;\r\n };\r\n }\r\n\r\n // fix ZipPath\r\n zipPath = (zipPath) ? 
fixPath(zipPath) : \"\";\r\n\r\n // normalize the path first\r\n localPath = pth.normalize(localPath);\r\n\r\n if (fs.existsSync(localPath)) {\r\n\r\n var items = Utils.findFiles(localPath),\r\n self = this;\r\n\r\n if (items.length) {\r\n items.forEach(function (filepath) {\r\n var p = pth.relative(localPath, filepath).split(\"\\\\\").join(\"/\"); //windows fix\r\n if (filter(p)) {\r\n if (filepath.charAt(filepath.length - 1) !== pth.sep) {\r\n self.addFile(zipPath + p, fs.readFileSync(filepath), \"\", fs.statSync(filepath));\r\n } else {\r\n self.addFile(zipPath + p + '/', Buffer.alloc(0), \"\", 0);\r\n }\r\n }\r\n });\r\n }\r\n } else {\r\n throw new Error(Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n }\r\n },\r\n\r\n\t\t/**\r\n\t\t * Asynchronous addLocalFile\r\n\t\t * @param localPath\r\n\t\t * @param callback\r\n\t\t * @param zipPath optional path inside zip\r\n\t\t * @param filter optional RegExp or Function if files match will\r\n\t\t * be included.\r\n\t\t */\r\n\t\taddLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) {\r\n\t\t\tif (filter === undefined) {\r\n\t\t\t\tfilter = function () {\r\n\t\t\t\t\treturn true;\r\n\t\t\t\t};\r\n\t\t\t} else if (filter instanceof RegExp) {\r\n\t\t\t\tfilter = function (filter) {\r\n\t\t\t\t\treturn function (filename) {\r\n\t\t\t\t\t\treturn filter.test(filename);\r\n\t\t\t\t\t}\r\n\t\t\t\t}(filter);\r\n\t\t\t}\r\n\r\n\t\t\tif (zipPath) {\r\n\t\t\t\tzipPath = zipPath.split(\"\\\\\").join(\"/\");\r\n\t\t\t\tif (zipPath.charAt(zipPath.length - 1) !== \"/\") {\r\n\t\t\t\t\tzipPath += \"/\";\r\n\t\t\t\t}\r\n\t\t\t} else {\r\n\t\t\t\tzipPath = \"\";\r\n\t\t\t}\r\n\t\t\t// normalize the path first\r\n\t\t\tlocalPath = pth.normalize(localPath);\r\n\t\t\tlocalPath = localPath.split(\"\\\\\").join(\"/\"); //windows fix\r\n\t\t\tif (localPath.charAt(localPath.length - 1) !== \"/\")\r\n\t\t\t\tlocalPath += \"/\";\r\n\r\n\t\t\tvar self = this;\r\n\t\t\tfs.open(localPath, 'r', function (err, fd) {\r\n\t\t\t\tif (err && err.code === 'ENOENT') {\r\n\t\t\t\t\tcallback(undefined, Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n\t\t\t\t} else if (err) {\r\n\t\t\t\t\tcallback(undefined, err);\r\n\t\t\t\t} else {\r\n\t\t\t\t\tvar items = Utils.findFiles(localPath);\r\n\t\t\t\t\tvar i = -1;\r\n\r\n\t\t\t\t\tvar next = function () {\r\n\t\t\t\t\t\ti += 1;\r\n\t\t\t\t\t\tif (i < items.length) {\r\n\t\t\t\t\t\t\tvar p = items[i].split(\"\\\\\").join(\"/\").replace(new RegExp(localPath.replace(/(\\(|\\))/g, '\\\\$1'), 'i'), \"\"); //windows fix\r\n\t\t\t\t\t\t\tp = p.normalize('NFD').replace(/[\\u0300-\\u036f]/g, '').replace(/[^\\x20-\\x7E]/g, '') // accent fix\r\n\t\t\t\t\t\t\tif (filter(p)) {\r\n\t\t\t\t\t\t\t\tif (p.charAt(p.length - 1) !== \"/\") {\r\n\t\t\t\t\t\t\t\t\tfs.readFile(items[i], function (err, data) {\r\n\t\t\t\t\t\t\t\t\t\tif (err) {\r\n\t\t\t\t\t\t\t\t\t\t\tcallback(undefined, err);\r\n\t\t\t\t\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\t\t\t\t\tself.addFile(zipPath + p, data, '', 0);\r\n\t\t\t\t\t\t\t\t\t\t\tnext();\r\n\t\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t\t})\r\n\t\t\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\t\t\tself.addFile(zipPath + p, Buffer.alloc(0), \"\", 0);\r\n\t\t\t\t\t\t\t\t\tnext();\r\n\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\t\tnext();\r\n\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\tcallback(true, 
undefined);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tnext();\r\n\t\t\t\t}\r\n\t\t\t});\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Allows you to create a entry (file or directory) in the zip file.\r\n\t\t * If you want to create a directory the entryName must end in / and a null buffer should be provided.\r\n\t\t * Comment and attributes are optional\r\n\t\t *\r\n\t\t * @param entryName\r\n\t\t * @param content\r\n\t\t * @param comment\r\n\t\t * @param attr\r\n\t\t */\r\n\t\taddFile: function (/**String*/entryName, /**Buffer*/content, /**String*/comment, /**Number*/attr) {\r\n\t\t\t// prepare new entry\r\n\t\t\tvar entry = new ZipEntry();\r\n\t\t\tentry.entryName = entryName;\r\n\t\t\tentry.comment = comment || \"\";\r\n\r\n\t\t\tvar isStat = ('object' === typeof attr) && (attr instanceof fs.Stats);\r\n\r\n\t\t\t// last modification time from file stats\r\n\t\t\tif (isStat){\r\n\t\t\t\tentry.header.time = attr.mtime;\r\n\t\t\t}\r\n\r\n\t\t\t// Set file attribute\r\n\t\t\tvar fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag)\r\n\r\n\t\t\t// extended attributes field for Unix\r\n\t\t\tif('win32' !== process.platform){\r\n\t\t\t\t// set file type either S_IFDIR / S_IFREG\r\n\t\t\t\tvar unix = (entry.isDirectory) ? 0x4000 : 0x8000;\r\n\r\n\t\t\t\tif (isStat) { \t\t\t\t\t\t\t\t\t\t// File attributes from file stats\r\n\t\t\t\t\tunix |= (0xfff & attr.mode) \r\n\t\t\t\t}else if ('number' === typeof attr){ \t\t\t\t// attr from given attr values\r\n\t\t\t\t\tunix |= (0xfff & attr);\r\n\t\t\t\t}else{\t\t\t\t\t\t\t\t\t\t\t\t// Default values: \r\n\t\t\t\t\tunix |= (entry.isDirectory) ? 0o755 : 0o644; \t// permissions (drwxr-xr-x) or (-r-wr--r--)\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfileattr = (fileattr | (unix << 16)) >>> 0;\t\t\t// add attributes\r\n\t\t\t}\r\n\r\n\t\t\tentry.attr = fileattr;\r\n\r\n\t\t\tentry.setData(content);\r\n\t\t\t_zip.setEntry(entry);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns an array of ZipEntry objects representing the files and folders inside the archive\r\n\t\t *\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tgetEntries: function () {\r\n\t\t\tif (_zip) {\r\n\t\t\t\treturn _zip.entries;\r\n\t\t\t} else {\r\n\t\t\t\treturn [];\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns a ZipEntry object representing the file or folder specified by ``name``.\r\n\t\t *\r\n\t\t * @param name\r\n\t\t * @return ZipEntry\r\n\t\t */\r\n\t\tgetEntry: function (/**String*/name) {\r\n\t\t\treturn getEntry(name);\r\n\t\t},\r\n\r\n\t\tgetEntryCount: function() {\r\n\t\t\treturn _zip.getEntryCount();\r\n\t\t},\r\n\r\n\t\tforEach: function(callback) {\r\n\t\t\treturn _zip.forEach(callback);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the given entry to the given targetPath\r\n\t\t * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted\r\n\t\t *\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param targetPath Target folder where to write the file\r\n\t\t * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder\r\n\t\t * will be created in targetPath as well. 
Default is TRUE\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)\r\n\t\t *\r\n\t\t * @return Boolean\r\n\t\t */\r\n\t\textractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) {\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tmaintainEntryPath = typeof maintainEntryPath === \"undefined\" ? true : maintainEntryPath;\r\n\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (!item) {\r\n\t\t\t\tthrow new Error(Utils.Errors.NO_ENTRY);\r\n\t\t\t}\r\n\r\n\t\t\tvar entryName = canonical(item.entryName);\r\n\r\n\t\t\tvar target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? entryName : pth.basename(entryName)));\r\n\r\n\t\t\tif (item.isDirectory) {\r\n\t\t\t\ttarget = pth.resolve(target, \"..\");\r\n\t\t\t\tvar children = _zip.getEntryChildren(item);\r\n\t\t\t\tchildren.forEach(function (child) {\r\n\t\t\t\t\tif (child.isDirectory) return;\r\n\t\t\t\t\tvar content = child.getData();\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tvar name = canonical(child.entryName)\r\n\t\t\t\t\tvar childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));\r\n\r\n\t\t\t\t\tUtils.writeFileTo(childName, content, overwrite);\r\n\t\t\t\t});\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tvar content = item.getData();\r\n\t\t\tif (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\r\n\t\t\tif (fs.existsSync(target) && !overwrite) {\r\n\t\t\t\tthrow new Error(Utils.Errors.CANT_OVERRIDE);\r\n\t\t\t}\r\n\t\t\tUtils.writeFileTo(target, content, overwrite);\r\n\r\n\t\t\treturn true;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Test the archive\r\n\t\t *\r\n\t\t */\r\n\t\ttest: function (pass) {\r\n\t\t\tif (!_zip) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tfor (var entry in _zip.entries) {\r\n\t\t\t\ttry {\r\n\t\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tvar content = _zip.entries[entry].getData(pass);\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t} catch (err) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\treturn true;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the entire archive to the given location\r\n\t\t *\r\n\t\t * @param targetPath Target location\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n\t\t */\r\n\t\textractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) {\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tif (!_zip) {\r\n\t\t\t\tthrow new Error(Utils.Errors.NO_ZIP);\r\n\t\t\t}\r\n\t\t\t_zip.entries.forEach(function (entry) {\r\n\t\t\t\tvar entryName = sanitize(targetPath, canonical(entry.entryName.toString()));\r\n\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\tUtils.makeDir(entryName);\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\t\t\t\tvar content = entry.getData(pass);\r\n\t\t\t\tif (!content) {\r\n\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t}\r\n\t\t\t\tUtils.writeFileTo(entryName, content, overwrite);\r\n\t\t\t\ttry {\r\n\t\t\t\t\tfs.utimesSync(entryName, entry.header.time, 
entry.header.time)\r\n\t\t\t\t} catch (err) {\r\n\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t}\r\n\t\t\t})\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous extractAllTo\r\n\t\t *\r\n\t\t * @param targetPath Target location\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n\t\t * @param callback\r\n\t\t */\r\n\t\textractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) {\r\n\t\t\tif (!callback) {\r\n\t\t\t\tcallback = function() {}\r\n\t\t\t}\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tif (!_zip) {\r\n\t\t\t\tcallback(new Error(Utils.Errors.NO_ZIP));\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tvar entries = _zip.entries;\r\n\t\t\tvar i = entries.length;\r\n\t\t\tentries.forEach(function (entry) {\r\n\t\t\t\tif (i <= 0) return; // Had an error already\r\n\r\n\t\t\t\tvar entryName = pth.normalize(canonical(entry.entryName.toString()));\r\n\r\n\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\tUtils.makeDir(sanitize(targetPath, entryName));\r\n\t\t\t\t\tif (--i === 0)\r\n\t\t\t\t\t\tcallback(undefined);\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\t\t\t\tentry.getDataAsync(function (content, err) {\r\n\t\t\t\t\tif (i <= 0) return;\r\n\t\t\t\t\tif (err) {\r\n\t\t\t\t\t\tcallback(new Error(err));\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\ti = 0;\r\n\t\t\t\t\t\tcallback(new Error(Utils.Errors.CANT_EXTRACT_FILE));\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tUtils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, function (succ) {\r\n\t\t\t\t\t\ttry {\r\n\t\t\t\t\t\t\tfs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time);\r\n\t\t\t\t\t\t} catch (err) {\r\n\t\t\t\t\t\t\tcallback(new Error('Unable to set utimes'));\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tif (i <= 0) return;\r\n\t\t\t\t\t\tif (!succ) {\r\n\t\t\t\t\t\t\ti = 0;\r\n\t\t\t\t\t\t\tcallback(new Error('Unable to write'));\r\n\t\t\t\t\t\t\treturn;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tif (--i === 0)\r\n\t\t\t\t\t\t\tcallback(undefined);\r\n\t\t\t\t\t});\r\n\t\t\t\t});\r\n\t\t\t})\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip\r\n\t\t *\r\n\t\t * @param targetFileName\r\n\t\t * @param callback\r\n\t\t */\r\n\t\twriteZip: function (/**String*/targetFileName, /**Function*/callback) {\r\n\t\t\tif (arguments.length === 1) {\r\n\t\t\t\tif (typeof targetFileName === \"function\") {\r\n\t\t\t\t\tcallback = targetFileName;\r\n\t\t\t\t\ttargetFileName = \"\";\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (!targetFileName && _filename) {\r\n\t\t\t\ttargetFileName = _filename;\r\n\t\t\t}\r\n\t\t\tif (!targetFileName) return;\r\n\r\n\t\t\tvar zipData = _zip.compressToBuffer();\r\n\t\t\tif (zipData) {\r\n\t\t\t\tvar ok = Utils.writeFileTo(targetFileName, zipData, true);\r\n\t\t\t\tif (typeof callback === 'function') callback(!ok ? 
new Error(\"failed\") : null, \"\");\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the content of the entire zip file as a Buffer object\r\n\t\t *\r\n\t\t * @return Buffer\r\n\t\t */\r\n\t\ttoBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) {\r\n\t\t\tthis.valueOf = 2;\r\n\t\t\tif (typeof onSuccess === \"function\") {\r\n\t\t\t\t_zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);\r\n\t\t\t\treturn null;\r\n\t\t\t}\r\n\t\t\treturn _zip.compressToBuffer()\r\n\t\t}\r\n\t}\r\n};\r\n","var Utils = require(\"../util\"),\r\n Constants = Utils.Constants;\r\n\r\n/* The central directory file header */\r\nmodule.exports = function () {\r\n var _verMade = 0x14,\r\n _version = 0x0A,\r\n _flags = 0,\r\n _method = 0,\r\n _time = 0,\r\n _crc = 0,\r\n _compressedSize = 0,\r\n _size = 0,\r\n _fnameLen = 0,\r\n _extraLen = 0,\r\n\r\n _comLen = 0,\r\n _diskStart = 0,\r\n _inattr = 0,\r\n _attr = 0,\r\n _offset = 0;\r\n\r\n switch(process.platform){\r\n case 'win32':\r\n _verMade |= 0x0A00;\r\n case 'darwin':\r\n _verMade |= 0x1300;\r\n default:\r\n _verMade |= 0x0300;\r\n }\r\n\r\n var _dataHeader = {};\r\n\r\n function setTime(val) {\r\n val = new Date(val);\r\n _time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980\r\n | (val.getMonth() + 1) << 21 // b05-08 month\r\n | val.getDate() << 16 // b00-04 hour\r\n\r\n // 2 bytes time\r\n | val.getHours() << 11 // b11-15 hour\r\n | val.getMinutes() << 5 // b05-10 minute\r\n | val.getSeconds() >> 1; // b00-04 seconds divided by 2\r\n }\r\n\r\n setTime(+new Date());\r\n\r\n return {\r\n get made () { return _verMade; },\r\n set made (val) { _verMade = val; },\r\n\r\n get version () { return _version; },\r\n set version (val) { _version = val },\r\n\r\n get flags () { return _flags },\r\n set flags (val) { _flags = val; },\r\n\r\n get method () { return _method; },\r\n set method (val) {\r\n switch (val){\r\n case Constants.STORED:\r\n this.version = 10;\r\n case Constants.DEFLATED:\r\n default:\r\n this.version = 20;\r\n }\r\n _method = val;\r\n },\r\n\r\n get time () { return new Date(\r\n ((_time >> 25) & 0x7f) + 1980,\r\n ((_time >> 21) & 0x0f) - 1,\r\n (_time >> 16) & 0x1f,\r\n (_time >> 11) & 0x1f,\r\n (_time >> 5) & 0x3f,\r\n (_time & 0x1f) << 1\r\n );\r\n },\r\n set time (val) {\r\n setTime(val);\r\n },\r\n\r\n get crc () { return _crc; },\r\n set crc (val) { _crc = val; },\r\n\r\n get compressedSize () { return _compressedSize; },\r\n set compressedSize (val) { _compressedSize = val; },\r\n\r\n get size () { return _size; },\r\n set size (val) { _size = val; },\r\n\r\n get fileNameLength () { return _fnameLen; },\r\n set fileNameLength (val) { _fnameLen = val; },\r\n\r\n get extraLength () { return _extraLen },\r\n set extraLength (val) { _extraLen = val; },\r\n\r\n get commentLength () { return _comLen },\r\n set commentLength (val) { _comLen = val },\r\n\r\n get diskNumStart () { return _diskStart },\r\n set diskNumStart (val) { _diskStart = val },\r\n\r\n get inAttr () { return _inattr },\r\n set inAttr (val) { _inattr = val },\r\n\r\n get attr () { return _attr },\r\n set attr (val) { _attr = val },\r\n\r\n get offset () { return _offset },\r\n set offset (val) { _offset = val },\r\n\r\n get encripted () { return (_flags & 1) === 1 },\r\n\r\n get entryHeaderSize () {\r\n return Constants.CENHDR + _fnameLen + _extraLen + _comLen;\r\n },\r\n\r\n get realDataOffset () {\r\n return _offset + Constants.LOCHDR + _dataHeader.fnameLen + 
_dataHeader.extraLen;\r\n },\r\n\r\n get dataHeader () {\r\n return _dataHeader;\r\n },\r\n\r\n loadDataHeaderFromBinary : function(/*Buffer*/input) {\r\n var data = input.slice(_offset, _offset + Constants.LOCHDR);\r\n // 30 bytes and should start with \"PK\\003\\004\"\r\n if (data.readUInt32LE(0) !== Constants.LOCSIG) {\r\n throw new Error(Utils.Errors.INVALID_LOC);\r\n }\r\n _dataHeader = {\r\n // version needed to extract\r\n version : data.readUInt16LE(Constants.LOCVER),\r\n // general purpose bit flag\r\n flags : data.readUInt16LE(Constants.LOCFLG),\r\n // compression method\r\n method : data.readUInt16LE(Constants.LOCHOW),\r\n // modification time (2 bytes time, 2 bytes date)\r\n time : data.readUInt32LE(Constants.LOCTIM),\r\n // uncompressed file crc-32 value\r\n crc : data.readUInt32LE(Constants.LOCCRC),\r\n // compressed size\r\n compressedSize : data.readUInt32LE(Constants.LOCSIZ),\r\n // uncompressed size\r\n size : data.readUInt32LE(Constants.LOCLEN),\r\n // filename length\r\n fnameLen : data.readUInt16LE(Constants.LOCNAM),\r\n // extra field length\r\n extraLen : data.readUInt16LE(Constants.LOCEXT)\r\n }\r\n },\r\n\r\n loadFromBinary : function(/*Buffer*/data) {\r\n // data should be 46 bytes and start with \"PK 01 02\"\r\n if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {\r\n throw new Error(Utils.Errors.INVALID_CEN);\r\n }\r\n // version made by\r\n _verMade = data.readUInt16LE(Constants.CENVEM);\r\n // version needed to extract\r\n _version = data.readUInt16LE(Constants.CENVER);\r\n // encrypt, decrypt flags\r\n _flags = data.readUInt16LE(Constants.CENFLG);\r\n // compression method\r\n _method = data.readUInt16LE(Constants.CENHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n _time = data.readUInt32LE(Constants.CENTIM);\r\n // uncompressed file crc-32 value\r\n _crc = data.readUInt32LE(Constants.CENCRC);\r\n // compressed size\r\n _compressedSize = data.readUInt32LE(Constants.CENSIZ);\r\n // uncompressed size\r\n _size = data.readUInt32LE(Constants.CENLEN);\r\n // filename length\r\n _fnameLen = data.readUInt16LE(Constants.CENNAM);\r\n // extra field length\r\n _extraLen = data.readUInt16LE(Constants.CENEXT);\r\n // file comment length\r\n _comLen = data.readUInt16LE(Constants.CENCOM);\r\n // volume number start\r\n _diskStart = data.readUInt16LE(Constants.CENDSK);\r\n // internal file attributes\r\n _inattr = data.readUInt16LE(Constants.CENATT);\r\n // external file attributes\r\n _attr = data.readUInt32LE(Constants.CENATX);\r\n // LOC header offset\r\n _offset = data.readUInt32LE(Constants.CENOFF);\r\n },\r\n\r\n dataHeaderToBinary : function() {\r\n // LOC header size (30 bytes)\r\n var data = Buffer.alloc(Constants.LOCHDR);\r\n // \"PK\\003\\004\"\r\n data.writeUInt32LE(Constants.LOCSIG, 0);\r\n // version needed to extract\r\n data.writeUInt16LE(_version, Constants.LOCVER);\r\n // general purpose bit flag\r\n data.writeUInt16LE(_flags, Constants.LOCFLG);\r\n // compression method\r\n data.writeUInt16LE(_method, Constants.LOCHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n data.writeUInt32LE(_time, Constants.LOCTIM);\r\n // uncompressed file crc-32 value\r\n data.writeUInt32LE(_crc, Constants.LOCCRC);\r\n // compressed size\r\n data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);\r\n // uncompressed size\r\n data.writeUInt32LE(_size, Constants.LOCLEN);\r\n // filename length\r\n data.writeUInt16LE(_fnameLen, Constants.LOCNAM);\r\n // extra field length\r\n data.writeUInt16LE(_extraLen, 
Constants.LOCEXT);\r\n return data;\r\n },\r\n\r\n entryHeaderToBinary : function() {\r\n // CEN header size (46 bytes)\r\n var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);\r\n // \"PK\\001\\002\"\r\n data.writeUInt32LE(Constants.CENSIG, 0);\r\n // version made by\r\n data.writeUInt16LE(_verMade, Constants.CENVEM);\r\n // version needed to extract\r\n data.writeUInt16LE(_version, Constants.CENVER);\r\n // encrypt, decrypt flags\r\n data.writeUInt16LE(_flags, Constants.CENFLG);\r\n // compression method\r\n data.writeUInt16LE(_method, Constants.CENHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n data.writeUInt32LE(_time, Constants.CENTIM);\r\n // uncompressed file crc-32 value\r\n data.writeUInt32LE(_crc, Constants.CENCRC);\r\n // compressed size\r\n data.writeUInt32LE(_compressedSize, Constants.CENSIZ);\r\n // uncompressed size\r\n data.writeUInt32LE(_size, Constants.CENLEN);\r\n // filename length\r\n data.writeUInt16LE(_fnameLen, Constants.CENNAM);\r\n // extra field length\r\n data.writeUInt16LE(_extraLen, Constants.CENEXT);\r\n // file comment length\r\n data.writeUInt16LE(_comLen, Constants.CENCOM);\r\n // volume number start\r\n data.writeUInt16LE(_diskStart, Constants.CENDSK);\r\n // internal file attributes\r\n data.writeUInt16LE(_inattr, Constants.CENATT);\r\n // external file attributes\r\n data.writeUInt32LE(_attr, Constants.CENATX);\r\n // LOC header offset\r\n data.writeUInt32LE(_offset, Constants.CENOFF);\r\n // fill all with\r\n data.fill(0x00, Constants.CENHDR);\r\n return data;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"made\" : ' + _verMade + \",\\n\" +\r\n '\\t\"version\" : ' + _version + \",\\n\" +\r\n '\\t\"flags\" : ' + _flags + \",\\n\" +\r\n '\\t\"method\" : ' + Utils.methodToString(_method) + \",\\n\" +\r\n '\\t\"time\" : ' + this.time + \",\\n\" +\r\n '\\t\"crc\" : 0x' + _crc.toString(16).toUpperCase() + \",\\n\" +\r\n '\\t\"compressedSize\" : ' + _compressedSize + \" bytes,\\n\" +\r\n '\\t\"size\" : ' + _size + \" bytes,\\n\" +\r\n '\\t\"fileNameLength\" : ' + _fnameLen + \",\\n\" +\r\n '\\t\"extraLength\" : ' + _extraLen + \" bytes,\\n\" +\r\n '\\t\"commentLength\" : ' + _comLen + \" bytes,\\n\" +\r\n '\\t\"diskNumStart\" : ' + _diskStart + \",\\n\" +\r\n '\\t\"inAttr\" : ' + _inattr + \",\\n\" +\r\n '\\t\"attr\" : ' + _attr + \",\\n\" +\r\n '\\t\"offset\" : ' + _offset + \",\\n\" +\r\n '\\t\"entryHeaderSize\" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + \" bytes\\n\" +\r\n '}';\r\n }\r\n }\r\n};\r\n","exports.EntryHeader = require(\"./entryHeader\");\r\nexports.MainHeader = require(\"./mainHeader\");\r\n","var Utils = require(\"../util\"),\r\n Constants = Utils.Constants;\r\n\r\n/* The entries in the end of central directory */\r\nmodule.exports = function () {\r\n var _volumeEntries = 0,\r\n _totalEntries = 0,\r\n _size = 0,\r\n _offset = 0,\r\n _commentLength = 0;\r\n\r\n return {\r\n get diskEntries () { return _volumeEntries },\r\n set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; },\r\n\r\n get totalEntries () { return _totalEntries },\r\n set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; },\r\n\r\n get size () { return _size },\r\n set size (/*Number*/val) { _size = val; },\r\n\r\n get offset () { return _offset },\r\n set offset (/*Number*/val) { _offset = val; },\r\n\r\n get commentLength () { return _commentLength },\r\n set commentLength (/*Number*/val) { _commentLength = val; },\r\n\r\n get mainHeaderSize () {\r\n return 
Constants.ENDHDR + _commentLength;\r\n },\r\n\r\n loadFromBinary : function(/*Buffer*/data) {\r\n // data should be 22 bytes and start with \"PK 05 06\"\r\n // or be 56+ bytes and start with \"PK 06 06\" for Zip64\r\n if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&\r\n (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) {\r\n\r\n throw new Error(Utils.Errors.INVALID_END);\r\n }\r\n\r\n if (data.readUInt32LE(0) === Constants.ENDSIG) {\r\n // number of entries on this volume\r\n _volumeEntries = data.readUInt16LE(Constants.ENDSUB);\r\n // total number of entries\r\n _totalEntries = data.readUInt16LE(Constants.ENDTOT);\r\n // central directory size in bytes\r\n _size = data.readUInt32LE(Constants.ENDSIZ);\r\n // offset of first CEN header\r\n _offset = data.readUInt32LE(Constants.ENDOFF);\r\n // zip file comment length\r\n _commentLength = data.readUInt16LE(Constants.ENDCOM);\r\n } else {\r\n // number of entries on this volume\r\n _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);\r\n // total number of entries\r\n _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);\r\n // central directory size in bytes\r\n _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);\r\n // offset of first CEN header\r\n _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);\r\n\r\n _commentLength = 0;\r\n }\r\n\r\n },\r\n\r\n toBinary : function() {\r\n var b = Buffer.alloc(Constants.ENDHDR + _commentLength);\r\n // \"PK 05 06\" signature\r\n b.writeUInt32LE(Constants.ENDSIG, 0);\r\n b.writeUInt32LE(0, 4);\r\n // number of entries on this volume\r\n b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);\r\n // total number of entries\r\n b.writeUInt16LE(_totalEntries, Constants.ENDTOT);\r\n // central directory size in bytes\r\n b.writeUInt32LE(_size, Constants.ENDSIZ);\r\n // offset of first CEN header\r\n b.writeUInt32LE(_offset, Constants.ENDOFF);\r\n // zip file comment length\r\n b.writeUInt16LE(_commentLength, Constants.ENDCOM);\r\n // fill comment memory with spaces so no garbage is left there\r\n b.fill(\" \", Constants.ENDHDR);\r\n\r\n return b;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"diskEntries\" : ' + _volumeEntries + \",\\n\" +\r\n '\\t\"totalEntries\" : ' + _totalEntries + \",\\n\" +\r\n '\\t\"size\" : ' + _size + \" bytes,\\n\" +\r\n '\\t\"offset\" : 0x' + _offset.toString(16).toUpperCase() + \",\\n\" +\r\n '\\t\"commentLength\" : 0x' + _commentLength + \"\\n\" +\r\n '}';\r\n }\r\n }\r\n};","module.exports = function (/*Buffer*/inbuf) {\r\n\r\n var zlib = require(\"zlib\");\r\n \r\n var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024};\r\n \r\n return {\r\n deflate: function () {\r\n return zlib.deflateRawSync(inbuf, opts);\r\n },\r\n\r\n deflateAsync: function (/*Function*/callback) {\r\n var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0;\r\n tmp.on('data', function (data) {\r\n parts.push(data);\r\n total += data.length;\r\n });\r\n tmp.on('end', function () {\r\n var buf = Buffer.alloc(total), written = 0;\r\n buf.fill(0);\r\n for (var i = 0; i < parts.length; i++) {\r\n var part = parts[i];\r\n part.copy(buf, written);\r\n written += part.length;\r\n }\r\n callback && callback(buf);\r\n });\r\n tmp.end(inbuf);\r\n }\r\n }\r\n};\r\n","exports.Deflater = require(\"./deflater\");\r\nexports.Inflater = require(\"./inflater\");\r\nexports.ZipCrypto = require(\"./zipcrypto\");","module.exports = function (/*Buffer*/inbuf) {\r\n\r\n var zlib = 
require(\"zlib\");\r\n\r\n return {\r\n inflate: function () {\r\n return zlib.inflateRawSync(inbuf);\r\n },\r\n\r\n inflateAsync: function (/*Function*/callback) {\r\n var tmp = zlib.createInflateRaw(), parts = [], total = 0;\r\n tmp.on('data', function (data) {\r\n parts.push(data);\r\n total += data.length;\r\n });\r\n tmp.on('end', function () {\r\n var buf = Buffer.alloc(total), written = 0;\r\n buf.fill(0);\r\n for (var i = 0; i < parts.length; i++) {\r\n var part = parts[i];\r\n part.copy(buf, written);\r\n written += part.length;\r\n }\r\n callback && callback(buf);\r\n });\r\n tmp.end(inbuf);\r\n }\r\n }\r\n};\r\n","// generate CRC32 lookup table\r\nconst crctable = (new Uint32Array(256)).map((t,crc)=>{\r\n for(let j=0;j<8;j++){\r\n if (0 !== (crc & 1)){\r\n crc = (crc >>> 1) ^ 0xEDB88320\r\n }else{\r\n crc >>>= 1 \r\n }\r\n }\r\n return crc>>>0;\r\n});\r\n\r\nfunction make_decrypter(/*Buffer*/pwd){\r\n // C-style uInt32 Multiply\r\n const uMul = (a,b) => Math.imul(a, b) >>> 0;\r\n // Initialize keys with default values\r\n const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);\r\n // crc32 byte update \r\n const crc32update = (pCrc32, bval) => {\r\n return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);\r\n }\r\n // update keys with byteValues\r\n const updateKeys = (byteValue) => {\r\n keys[0] = crc32update(keys[0], byteValue);\r\n keys[1] += keys[0] & 0xff; \r\n keys[1] = uMul(keys[1], 134775813) + 1;\r\n keys[2] = crc32update(keys[2], keys[1] >>> 24);\r\n }\r\n\r\n // 1. Stage initialize key\r\n const pass = (Buffer.isBuffer(pwd)) ? pwd : Buffer.from(pwd);\r\n for(let i=0; i< pass.length; i++){\r\n updateKeys(pass[i]);\r\n }\r\n\r\n // return decrypter function\r\n return function (/*Buffer*/data){\r\n if (!Buffer.isBuffer(data)){\r\n throw 'decrypter needs Buffer'\r\n }\r\n // result - we create new Buffer for results\r\n const result = Buffer.alloc(data.length);\r\n let pos = 0;\r\n // process input data\r\n for(let c of data){\r\n const k = (keys[2] | 2) >>> 0; // key\r\n c ^= (uMul(k, k^1) >> 8) & 0xff; // decode\r\n result[pos++] = c; // Save Value\r\n updateKeys(c); // update keys with decoded byte\r\n }\r\n return result;\r\n }\r\n}\r\n\r\nfunction decrypt(/*Buffer*/ data, /*Object*/header, /*String, Buffer*/ pwd){\r\n if (!data || !Buffer.isBuffer(data) || data.length < 12) {\r\n return Buffer.alloc(0);\r\n }\r\n \r\n // We Initialize and generate decrypting function\r\n const decrypter = make_decrypter(pwd);\r\n\r\n // check - for testing password\r\n const check = header.crc >>> 24;\r\n // decrypt salt what is always 12 bytes and is a part of file content\r\n const testbyte = decrypter(data.slice(0, 12))[11];\r\n\r\n // does password meet expectations\r\n if (check !== testbyte){\r\n throw 'ADM-ZIP: Wrong Password';\r\n }\r\n\r\n // decode content\r\n return decrypter(data.slice(12));\r\n}\r\n\r\nmodule.exports = {decrypt};\r\n","module.exports = {\r\n /* The local file header */\r\n LOCHDR : 30, // LOC header size\r\n LOCSIG : 0x04034b50, // \"PK\\003\\004\"\r\n LOCVER : 4,\t// version needed to extract\r\n LOCFLG : 6, // general purpose bit flag\r\n LOCHOW : 8, // compression method\r\n LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)\r\n LOCCRC : 14, // uncompressed file crc-32 value\r\n LOCSIZ : 18, // compressed size\r\n LOCLEN : 22, // uncompressed size\r\n LOCNAM : 26, // filename length\r\n LOCEXT : 28, // extra field length\r\n\r\n /* The Data descriptor */\r\n EXTSIG : 0x08074b50, // \"PK\\007\\008\"\r\n EXTHDR : 16, // EXT 
header size\r\n EXTCRC : 4, // uncompressed file crc-32 value\r\n EXTSIZ : 8, // compressed size\r\n EXTLEN : 12, // uncompressed size\r\n\r\n /* The central directory file header */\r\n CENHDR : 46, // CEN header size\r\n CENSIG : 0x02014b50, // \"PK\\001\\002\"\r\n CENVEM : 4, // version made by\r\n CENVER : 6, // version needed to extract\r\n CENFLG : 8, // encrypt, decrypt flags\r\n CENHOW : 10, // compression method\r\n CENTIM : 12, // modification time (2 bytes time, 2 bytes date)\r\n CENCRC : 16, // uncompressed file crc-32 value\r\n CENSIZ : 20, // compressed size\r\n CENLEN : 24, // uncompressed size\r\n CENNAM : 28, // filename length\r\n CENEXT : 30, // extra field length\r\n CENCOM : 32, // file comment length\r\n CENDSK : 34, // volume number start\r\n CENATT : 36, // internal file attributes\r\n CENATX : 38, // external file attributes (host system dependent)\r\n CENOFF : 42, // LOC header offset\r\n\r\n /* The entries in the end of central directory */\r\n ENDHDR : 22, // END header size\r\n ENDSIG : 0x06054b50, // \"PK\\005\\006\"\r\n ENDSUB : 8, // number of entries on this disk\r\n ENDTOT : 10, // total number of entries\r\n ENDSIZ : 12, // central directory size in bytes\r\n ENDOFF : 16, // offset of first CEN header\r\n ENDCOM : 20, // zip file comment length\r\n\r\n END64HDR : 20, // zip64 END header size\r\n END64SIG : 0x07064b50, // zip64 Locator signature, \"PK\\006\\007\"\r\n END64START : 4, // number of the disk with the start of the zip64\r\n END64OFF : 8, // relative offset of the zip64 end of central directory\r\n END64NUMDISKS : 16, // total number of disks\r\n\r\n ZIP64SIG : 0x06064b50, // zip64 signature, \"PK\\006\\006\"\r\n ZIP64HDR : 56, // zip64 record minimum size\r\n ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE\r\n ZIP64SIZE : 4, // zip64 size of the central directory record\r\n ZIP64VEM : 12, // zip64 version made by\r\n ZIP64VER : 14, // zip64 version needed to extract\r\n ZIP64DSK : 16, // zip64 number of this disk\r\n ZIP64DSKDIR : 20, // number of the disk with the start of the record directory\r\n ZIP64SUB : 24, // number of entries on this disk\r\n ZIP64TOT : 32, // total number of entries\r\n ZIP64SIZB : 40, // zip64 central directory size in bytes\r\n ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number\r\n ZIP64EXTRA : 56, // extensible data sector\r\n\r\n /* Compression methods */\r\n STORED : 0, // no compression\r\n SHRUNK : 1, // shrunk\r\n REDUCED1 : 2, // reduced with compression factor 1\r\n REDUCED2 : 3, // reduced with compression factor 2\r\n REDUCED3 : 4, // reduced with compression factor 3\r\n REDUCED4 : 5, // reduced with compression factor 4\r\n IMPLODED : 6, // imploded\r\n // 7 reserved\r\n DEFLATED : 8, // deflated\r\n ENHANCED_DEFLATED: 9, // enhanced deflated\r\n PKWARE : 10,// PKWare DCL imploded\r\n // 11 reserved\r\n BZIP2 : 12, // compressed using BZIP2\r\n // 13 reserved\r\n LZMA : 14, // LZMA\r\n // 15-17 reserved\r\n IBM_TERSE : 18, // compressed using IBM TERSE\r\n IBM_LZ77 : 19, //IBM LZ77 z\r\n\r\n /* General purpose bit flag */\r\n FLG_ENC : 0, // encripted file\r\n FLG_COMP1 : 1, // compression option\r\n FLG_COMP2 : 2, // compression option\r\n FLG_DESC : 4, // data descriptor\r\n FLG_ENH : 8, // enhanced deflation\r\n FLG_STR : 16, // strong encryption\r\n FLG_LNG : 1024, // language encoding\r\n FLG_MSK : 4096, // mask header values\r\n\r\n /* Load type */\r\n FILE : 0,\r\n BUFFER : 1,\r\n NONE : 
2,\r\n\r\n /* 4.5 Extensible data fields */\r\n EF_ID : 0,\r\n EF_SIZE : 2,\r\n\r\n /* Header IDs */\r\n ID_ZIP64 : 0x0001,\r\n ID_AVINFO : 0x0007,\r\n ID_PFS : 0x0008,\r\n ID_OS2 : 0x0009,\r\n ID_NTFS : 0x000a,\r\n ID_OPENVMS : 0x000c,\r\n ID_UNIX : 0x000d,\r\n ID_FORK : 0x000e,\r\n ID_PATCH : 0x000f,\r\n ID_X509_PKCS7 : 0x0014,\r\n ID_X509_CERTID_F : 0x0015,\r\n ID_X509_CERTID_C : 0x0016,\r\n ID_STRONGENC : 0x0017,\r\n ID_RECORD_MGT : 0x0018,\r\n ID_X509_PKCS7_RL : 0x0019,\r\n ID_IBM1 : 0x0065,\r\n ID_IBM2 : 0x0066,\r\n ID_POSZIP : 0x4690,\r\n\r\n EF_ZIP64_OR_32 : 0xffffffff,\r\n EF_ZIP64_OR_16 : 0xffff,\r\n EF_ZIP64_SUNCOMP : 0,\r\n EF_ZIP64_SCOMP : 8,\r\n EF_ZIP64_RHO : 16,\r\n EF_ZIP64_DSN : 24\r\n};\r\n","module.exports = {\r\n /* Header error messages */\r\n \"INVALID_LOC\" : \"Invalid LOC header (bad signature)\",\r\n \"INVALID_CEN\" : \"Invalid CEN header (bad signature)\",\r\n \"INVALID_END\" : \"Invalid END header (bad signature)\",\r\n\r\n /* ZipEntry error messages*/\r\n \"NO_DATA\" : \"Nothing to decompress\",\r\n \"BAD_CRC\" : \"CRC32 checksum failed\",\r\n \"FILE_IN_THE_WAY\" : \"There is a file in the way: %s\",\r\n \"UNKNOWN_METHOD\" : \"Invalid/unsupported compression method\",\r\n\r\n /* Inflater error messages */\r\n \"AVAIL_DATA\" : \"inflate::Available inflate data did not terminate\",\r\n \"INVALID_DISTANCE\" : \"inflate::Invalid literal/length or distance code in fixed or dynamic block\",\r\n \"TO_MANY_CODES\" : \"inflate::Dynamic block code description: too many length or distance codes\",\r\n \"INVALID_REPEAT_LEN\" : \"inflate::Dynamic block code description: repeat more than specified lengths\",\r\n \"INVALID_REPEAT_FIRST\" : \"inflate::Dynamic block code description: repeat lengths with no first length\",\r\n \"INCOMPLETE_CODES\" : \"inflate::Dynamic block code description: code lengths codes incomplete\",\r\n \"INVALID_DYN_DISTANCE\": \"inflate::Dynamic block code description: invalid distance code lengths\",\r\n \"INVALID_CODES_LEN\": \"inflate::Dynamic block code description: invalid literal/length code lengths\",\r\n \"INVALID_STORE_BLOCK\" : \"inflate::Stored block length did not match one's complement\",\r\n \"INVALID_BLOCK_TYPE\" : \"inflate::Invalid block type (type == 3)\",\r\n\r\n /* ADM-ZIP error messages */\r\n \"CANT_EXTRACT_FILE\" : \"Could not extract the file\",\r\n \"CANT_OVERRIDE\" : \"Target file already exists\",\r\n \"NO_ZIP\" : \"No zip file was loaded\",\r\n \"NO_ENTRY\" : \"Entry doesn't exist\",\r\n \"DIRECTORY_CONTENT_ERROR\" : \"A directory cannot have content\",\r\n \"FILE_NOT_FOUND\" : \"File not found: %s\",\r\n \"NOT_IMPLEMENTED\" : \"Not implemented\",\r\n \"INVALID_FILENAME\" : \"Invalid filename\",\r\n \"INVALID_FORMAT\" : \"Invalid or unsupported zip format. 
No END header found\"\r\n};","var fs = require(\"./fileSystem\").require(),\r\n pth = require(\"path\");\r\n\t\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nmodule.exports = function(/*String*/path) {\r\n\r\n var _path = path || \"\",\r\n _permissions = 0,\r\n _obj = newAttr(),\r\n _stat = null;\r\n\r\n function newAttr() {\r\n return {\r\n directory : false,\r\n readonly : false,\r\n hidden : false,\r\n executable : false,\r\n mtime : 0,\r\n atime : 0\r\n }\r\n }\r\n\r\n if (_path && fs.existsSync(_path)) {\r\n _stat = fs.statSync(_path);\r\n _obj.directory = _stat.isDirectory();\r\n _obj.mtime = _stat.mtime;\r\n _obj.atime = _stat.atime;\r\n _obj.executable = (0o111 & _stat.mode) != 0; // file is executable who ever har right not just owner\r\n _obj.readonly = (0o200 & _stat.mode) == 0; // readonly if owner has no write right\r\n _obj.hidden = pth.basename(_path)[0] === \".\";\r\n } else {\r\n console.warn(\"Invalid path: \" + _path)\r\n }\r\n\r\n return {\r\n\r\n get directory () {\r\n return _obj.directory;\r\n },\r\n\r\n get readOnly () {\r\n return _obj.readonly;\r\n },\r\n\r\n get hidden () {\r\n return _obj.hidden;\r\n },\r\n\r\n get mtime () {\r\n return _obj.mtime;\r\n },\r\n\r\n get atime () {\r\n return _obj.atime;\r\n },\r\n\r\n\r\n get executable () {\r\n return _obj.executable;\r\n },\r\n\r\n decodeAttributes : function(val) {\r\n\r\n },\r\n\r\n encodeAttributes : function (val) {\r\n\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"path\" : \"' + _path + \",\\n\" +\r\n '\\t\"isDirectory\" : ' + _obj.directory + \",\\n\" +\r\n '\\t\"isReadOnly\" : ' + _obj.readonly + \",\\n\" +\r\n '\\t\"isHidden\" : ' + _obj.hidden + \",\\n\" +\r\n '\\t\"isExecutable\" : ' + _obj.executable + \",\\n\" +\r\n '\\t\"mTime\" : ' + _obj.mtime + \"\\n\" +\r\n '\\t\"aTime\" : ' + _obj.atime + \"\\n\" +\r\n '}';\r\n }\r\n }\r\n\r\n};\r\n","exports.require = function() {\r\n var fs = require(\"fs\");\r\n if (process && process.versions && process.versions['electron']) {\r\n\t try {\r\n\t originalFs = require(\"original-fs\");\r\n\t if (Object.keys(originalFs).length > 0) {\r\n\t fs = originalFs;\r\n }\r\n\t } catch (e) {}\r\n }\r\n return fs\r\n};\r\n","module.exports = require(\"./utils\");\r\nmodule.exports.FileSystem = require(\"./fileSystem\");\r\nmodule.exports.Constants = require(\"./constants\");\r\nmodule.exports.Errors = require(\"./errors\");\r\nmodule.exports.FileAttr = require(\"./fattr\");","var fs = require(\"./fileSystem\").require(),\r\n pth = require('path');\r\n\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nmodule.exports = (function() {\r\n\r\n var crcTable = [],\r\n Constants = require('./constants'),\r\n Errors = require('./errors'),\r\n\r\n PATH_SEPARATOR = pth.sep;\r\n\r\n\r\n function mkdirSync(/*String*/path) {\r\n var resolvedPath = path.split(PATH_SEPARATOR)[0];\r\n path.split(PATH_SEPARATOR).forEach(function(name) {\r\n if (!name || name.substr(-1,1) === \":\") return;\r\n resolvedPath += PATH_SEPARATOR + name;\r\n var stat;\r\n try {\r\n stat = fs.statSync(resolvedPath);\r\n } catch (e) {\r\n fs.mkdirSync(resolvedPath);\r\n }\r\n if (stat && stat.isFile())\r\n throw Errors.FILE_IN_THE_WAY.replace(\"%s\", resolvedPath);\r\n });\r\n }\r\n\r\n function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) {\r\n if (typeof pattern === 'boolean') {\r\n recoursive = pattern;\r\n pattern = undefined;\r\n }\r\n var files = [];\r\n fs.readdirSync(dir).forEach(function(file) {\r\n var path = pth.join(dir, file);\r\n\r\n if 
(fs.statSync(path).isDirectory() && recoursive)\r\n files = files.concat(findSync(path, pattern, recoursive));\r\n\r\n if (!pattern || pattern.test(path)) {\r\n files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : \"\"));\r\n }\r\n\r\n });\r\n return files;\r\n }\r\n\r\n function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) {\r\n var slice = Buffer.from(buffer.slice(index, index + 8));\r\n slice.swap64();\r\n\r\n return parseInt(`0x${ slice.toString('hex') }`);\r\n }\r\n\r\n return {\r\n makeDir : function(/*String*/path) {\r\n mkdirSync(path);\r\n },\r\n\r\n crc32 : function(buf) {\r\n if (typeof buf === 'string') {\r\n buf = Buffer.alloc(buf.length, buf);\r\n }\r\n var b = Buffer.alloc(4);\r\n if (!crcTable.length) {\r\n for (var n = 0; n < 256; n++) {\r\n var c = n;\r\n for (var k = 8; --k >= 0;) //\r\n if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; }\r\n if (c < 0) {\r\n b.writeInt32LE(c, 0);\r\n c = b.readUInt32LE(0);\r\n }\r\n crcTable[n] = c;\r\n }\r\n }\r\n var crc = 0, off = 0, len = buf.length, c1 = ~crc;\r\n while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8);\r\n crc = ~c1;\r\n b.writeInt32LE(crc & 0xffffffff, 0);\r\n return b.readUInt32LE(0);\r\n },\r\n\r\n methodToString : function(/*Number*/method) {\r\n switch (method) {\r\n case Constants.STORED:\r\n return 'STORED (' + method + ')';\r\n case Constants.DEFLATED:\r\n return 'DEFLATED (' + method + ')';\r\n default:\r\n return 'UNSUPPORTED (' + method + ')';\r\n }\r\n\r\n },\r\n\r\n writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) {\r\n if (fs.existsSync(path)) {\r\n if (!overwrite)\r\n return false; // cannot overwrite\r\n\r\n var stat = fs.statSync(path);\r\n if (stat.isDirectory()) {\r\n return false;\r\n }\r\n }\r\n var folder = pth.dirname(path);\r\n if (!fs.existsSync(folder)) {\r\n mkdirSync(folder);\r\n }\r\n\r\n var fd;\r\n try {\r\n fd = fs.openSync(path, 'w', 438); // 0666\r\n } catch(e) {\r\n fs.chmodSync(path, 438);\r\n fd = fs.openSync(path, 'w', 438);\r\n }\r\n if (fd) {\r\n try {\r\n fs.writeSync(fd, content, 0, content.length, 0);\r\n }\r\n catch (e){\r\n throw e;\r\n }\r\n finally {\r\n fs.closeSync(fd);\r\n }\r\n }\r\n fs.chmodSync(path, attr || 438);\r\n return true;\r\n },\r\n\r\n writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) {\r\n if(typeof attr === 'function') {\r\n callback = attr;\r\n attr = undefined;\r\n }\r\n\r\n fs.exists(path, function(exists) {\r\n if(exists && !overwrite)\r\n return callback(false);\r\n\r\n fs.stat(path, function(err, stat) {\r\n if(exists &&stat.isDirectory()) {\r\n return callback(false);\r\n }\r\n\r\n var folder = pth.dirname(path);\r\n fs.exists(folder, function(exists) {\r\n if(!exists)\r\n mkdirSync(folder);\r\n\r\n fs.open(path, 'w', 438, function(err, fd) {\r\n if(err) {\r\n fs.chmod(path, 438, function() {\r\n fs.open(path, 'w', 438, function(err, fd) {\r\n fs.write(fd, content, 0, content.length, 0, function() {\r\n fs.close(fd, function() {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n });\r\n });\r\n });\r\n })\r\n } else {\r\n if(fd) {\r\n fs.write(fd, content, 0, content.length, 0, function() {\r\n fs.close(fd, function() {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n });\r\n });\r\n } else {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n }\r\n }\r\n });\r\n })\r\n })\r\n 
})\r\n },\r\n\r\n findFiles : function(/*String*/path) {\r\n return findSync(path, true);\r\n },\r\n\r\n getAttributes : function(/*String*/path) {\r\n\r\n },\r\n\r\n setAttributes : function(/*String*/path) {\r\n\r\n },\r\n\r\n toBuffer : function(input) {\r\n if (Buffer.isBuffer(input)) {\r\n return input;\r\n } else {\r\n if (input.length === 0) {\r\n return Buffer.alloc(0)\r\n }\r\n return Buffer.from(input, 'utf8');\r\n }\r\n },\r\n\r\n readBigUInt64LE,\r\n\r\n Constants : Constants,\r\n Errors : Errors\r\n }\r\n})();\r\n","var Utils = require(\"./util\"),\r\n Headers = require(\"./headers\"),\r\n Constants = Utils.Constants,\r\n Methods = require(\"./methods\");\r\n\r\nmodule.exports = function (/*Buffer*/input) {\r\n\r\n var _entryHeader = new Headers.EntryHeader(),\r\n _entryName = Buffer.alloc(0),\r\n _comment = Buffer.alloc(0),\r\n _isDirectory = false,\r\n uncompressedData = null,\r\n _extra = Buffer.alloc(0);\r\n\r\n function getCompressedDataFromZip() {\r\n if (!input || !Buffer.isBuffer(input)) {\r\n return Buffer.alloc(0);\r\n }\r\n _entryHeader.loadDataHeaderFromBinary(input);\r\n return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)\r\n }\r\n\r\n function crc32OK(data) {\r\n // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written\r\n if ((_entryHeader.flags & 0x8) !== 0x8) {\r\n if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {\r\n return false;\r\n }\r\n } else {\r\n // @TODO: load and check data descriptor header\r\n // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure\r\n // (optionally preceded by a 4-byte signature) immediately after the compressed data:\r\n }\r\n return true;\r\n }\r\n\r\n function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) {\r\n if(typeof callback === 'undefined' && typeof async === 'string') {\r\n pass=async;\r\n async=void 0;\r\n }\r\n if (_isDirectory) {\r\n if (async && callback) {\r\n callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.\r\n }\r\n return Buffer.alloc(0);\r\n }\r\n\r\n var compressedData = getCompressedDataFromZip();\r\n\r\n if (compressedData.length === 0) {\r\n // File is empty, nothing to decompress.\r\n if (async && callback) callback(compressedData);\r\n return compressedData;\r\n }\r\n\r\n if (_entryHeader.encripted){\r\n if ('string' !== typeof pass && !Buffer.isBuffer(pass)){\r\n throw new Error('ADM-ZIP: Incompatible password parameter');\r\n }\r\n compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);\r\n }\r\n\r\n var data = Buffer.alloc(_entryHeader.size);\r\n\r\n switch (_entryHeader.method) {\r\n case Utils.Constants.STORED:\r\n compressedData.copy(data);\r\n if (!crc32OK(data)) {\r\n if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error\r\n throw new Error(Utils.Errors.BAD_CRC);\r\n } else {//si added otherwise did not seem to return data.\r\n if (async && callback) callback(data);\r\n return data;\r\n }\r\n case Utils.Constants.DEFLATED:\r\n var inflater = new Methods.Inflater(compressedData);\r\n if (!async) {\r\n var result = inflater.inflate(data);\r\n result.copy(data, 0);\r\n if (!crc32OK(data)) {\r\n throw new Error(Utils.Errors.BAD_CRC + \" \" + _entryName.toString());\r\n }\r\n return data;\r\n } else {\r\n inflater.inflateAsync(function(result) {\r\n result.copy(data, 0);\r\n if (!crc32OK(data)) 
{\r\n if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error\r\n } else { //si added otherwise did not seem to return data.\r\n if (callback) callback(data);\r\n }\r\n })\r\n }\r\n break;\r\n default:\r\n if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);\r\n throw new Error(Utils.Errors.UNKNOWN_METHOD);\r\n }\r\n }\r\n\r\n function compress(/*Boolean*/async, /*Function*/callback) {\r\n if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {\r\n // no data set or the data wasn't changed to require recompression\r\n if (async && callback) callback(getCompressedDataFromZip());\r\n return getCompressedDataFromZip();\r\n }\r\n\r\n if (uncompressedData.length && !_isDirectory) {\r\n var compressedData;\r\n // Local file header\r\n switch (_entryHeader.method) {\r\n case Utils.Constants.STORED:\r\n _entryHeader.compressedSize = _entryHeader.size;\r\n\r\n compressedData = Buffer.alloc(uncompressedData.length);\r\n uncompressedData.copy(compressedData);\r\n\r\n if (async && callback) callback(compressedData);\r\n return compressedData;\r\n default:\r\n case Utils.Constants.DEFLATED:\r\n\r\n var deflater = new Methods.Deflater(uncompressedData);\r\n if (!async) {\r\n var deflated = deflater.deflate();\r\n _entryHeader.compressedSize = deflated.length;\r\n return deflated;\r\n } else {\r\n deflater.deflateAsync(function(data) {\r\n compressedData = Buffer.alloc(data.length);\r\n _entryHeader.compressedSize = data.length;\r\n data.copy(compressedData);\r\n callback && callback(compressedData);\r\n })\r\n }\r\n deflater = null;\r\n break;\r\n }\r\n } else {\r\n if (async && callback) {\r\n callback(Buffer.alloc(0));\r\n } else {\r\n return Buffer.alloc(0);\r\n }\r\n }\r\n }\r\n\r\n function readUInt64LE(buffer, offset) {\r\n return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);\r\n }\r\n\r\n function parseExtra(data) {\r\n var offset = 0;\r\n var signature, size, part;\r\n while(offset<data.length) {\r\n signature = data.readUInt16LE(offset);\r\n offset += 2;\r\n size = data.readUInt16LE(offset);\r\n offset += 2;\r\n part = data.slice(offset, offset+size);\r\n offset += size;\r\n if(Constants.ID_ZIP64 === signature) {\r\n parseZip64ExtendedInformation(part);\r\n }\r\n }\r\n }\r\n\r\n //Override header field values with values from the ZIP64 extra field\r\n function parseZip64ExtendedInformation(data) {\r\n var size, compressedSize, offset, diskNumStart;\r\n\r\n if(data.length >= Constants.EF_ZIP64_SCOMP) {\r\n size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);\r\n if(_entryHeader.size === Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.size = size;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_RHO) {\r\n compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);\r\n if(_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.compressedSize = compressedSize;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_DSN) {\r\n offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);\r\n if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.offset = offset;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_DSN+4) {\r\n diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);\r\n if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {\r\n _entryHeader.diskNumStart = diskNumStart;\r\n }\r\n }\r\n }\r\n\r\n\r\n return {\r\n get entryName () { return _entryName.toString(); },\r\n get rawEntryName() { return _entryName; },\r\n set entryName (val) {\r\n _entryName = Utils.toBuffer(val);\r\n var lastChar = _entryName[_entryName.length - 1];\r\n _isDirectory = (lastChar === 47) || (lastChar === 92);\r\n _entryHeader.fileNameLength = _entryName.length;\r\n },\r\n\r\n get extra () { return _extra; },\r\n set extra (val) {\r\n _extra = val;\r\n _entryHeader.extraLength = val.length;\r\n parseExtra(val);\r\n },\r\n\r\n get comment () { return _comment.toString(); },\r\n set comment (val) {\r\n _comment = Utils.toBuffer(val);\r\n _entryHeader.commentLength = 
_comment.length;\r\n },\r\n\r\n get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split(\"/\").pop() : n.split(\"/\").pop(); },\r\n get isDirectory () { return _isDirectory },\r\n\r\n getCompressedData : function() {\r\n return compress(false, null)\r\n },\r\n\r\n getCompressedDataAsync : function(/*Function*/callback) {\r\n compress(true, callback)\r\n },\r\n\r\n setData : function(value) {\r\n uncompressedData = Utils.toBuffer(value);\r\n if (!_isDirectory && uncompressedData.length) {\r\n _entryHeader.size = uncompressedData.length;\r\n _entryHeader.method = Utils.Constants.DEFLATED;\r\n _entryHeader.crc = Utils.crc32(value);\r\n _entryHeader.changed = true;\r\n } else { // folders and blank files should be stored\r\n _entryHeader.method = Utils.Constants.STORED;\r\n }\r\n },\r\n\r\n getData : function(pass) {\r\n if (_entryHeader.changed) {\r\n\t\t\t\treturn uncompressedData;\r\n\t\t\t} else {\r\n\t\t\t\treturn decompress(false, null, pass);\r\n }\r\n },\r\n\r\n getDataAsync : function(/*Function*/callback, pass) {\r\n\t\t\tif (_entryHeader.changed) {\r\n\t\t\t\tcallback(uncompressedData)\r\n\t\t\t} else {\r\n\t\t\t\tdecompress(true, callback, pass)\r\n }\r\n },\r\n\r\n set attr(attr) { _entryHeader.attr = attr; },\r\n get attr() { return _entryHeader.attr; },\r\n\r\n set header(/*Buffer*/data) {\r\n _entryHeader.loadFromBinary(data);\r\n },\r\n\r\n get header() {\r\n return _entryHeader;\r\n },\r\n\r\n packHeader : function() {\r\n var header = _entryHeader.entryHeaderToBinary();\r\n // add\r\n _entryName.copy(header, Utils.Constants.CENHDR);\r\n if (_entryHeader.extraLength) {\r\n _extra.copy(header, Utils.Constants.CENHDR + _entryName.length)\r\n }\r\n if (_entryHeader.commentLength) {\r\n _comment.copy(header, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length);\r\n }\r\n return header;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"entryName\" : \"' + _entryName.toString() + \"\\\",\\n\" +\r\n '\\t\"name\" : \"' + (_isDirectory ? _entryName.toString().replace(/\\/$/, '').split(\"/\").pop() : _entryName.toString().split(\"/\").pop()) + \"\\\",\\n\" +\r\n '\\t\"comment\" : \"' + _comment.toString() + \"\\\",\\n\" +\r\n '\\t\"isDirectory\" : ' + _isDirectory + \",\\n\" +\r\n '\\t\"header\" : ' + _entryHeader.toString().replace(/\\t/mg, \"\\t\\t\").replace(/}/mg, \"\\t}\") + \",\\n\" +\r\n '\\t\"compressedData\" : <' + (input && input.length + \" bytes buffer\" || \"null\") + \">\\n\" +\r\n '\\t\"data\" : <' + (uncompressedData && uncompressedData.length + \" bytes buffer\" || \"null\") + \">\\n\" +\r\n '}';\r\n }\r\n }\r\n};\r\n","var ZipEntry = require(\"./zipEntry\"),\r\n\tHeaders = require(\"./headers\"),\r\n\tUtils = require(\"./util\");\r\n\r\nmodule.exports = function (/*String|Buffer*/input, /*Number*/inputType) {\r\n\tvar entryList = [],\r\n\t\tentryTable = {},\r\n\t\t_comment = Buffer.alloc(0),\r\n\t\tfilename = \"\",\r\n\t\tfs = Utils.FileSystem.require(),\r\n\t\tinBuffer = null,\r\n\t\tmainHeader = new Headers.MainHeader(),\r\n\t\tloadedEntries = false;\r\n\r\n\tif (inputType === Utils.Constants.FILE) {\r\n\t\t// is a filename\r\n\t\tfilename = input;\r\n\t\tinBuffer = fs.readFileSync(filename);\r\n\t\treadMainHeader();\r\n\t} else if (inputType === Utils.Constants.BUFFER) {\r\n\t\t// is a memory buffer\r\n\t\tinBuffer = input;\r\n\t\treadMainHeader();\r\n\t} else {\r\n\t\t// none. 
is a new file\r\n\t\tloadedEntries = true;\r\n\t}\r\n\r\n\tfunction iterateEntries(callback) {\r\n\t\tconst totalEntries = mainHeader.diskEntries; // total number of entries\r\n\t\tlet index = mainHeader.offset; // offset of first CEN header\r\n\r\n\t\tfor (let i = 0; i < totalEntries; i++) {\r\n\t\t\tlet tmp = index;\r\n\t\t\tconst entry = new ZipEntry(inBuffer);\r\n\r\n\t\t\tentry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);\r\n\t\t\tentry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);\r\n\r\n\t\t\tindex += entry.header.entryHeaderSize;\r\n\r\n\t\t\tcallback(entry);\r\n\t\t}\r\n\t}\r\n\r\n\tfunction readEntries() {\r\n\t\tloadedEntries = true;\r\n\t\tentryTable = {};\r\n\t\tentryList = new Array(mainHeader.diskEntries); // total number of entries\r\n\t\tvar index = mainHeader.offset; // offset of first CEN header\r\n\t\tfor (var i = 0; i < entryList.length; i++) {\r\n\r\n\t\t\tvar tmp = index,\r\n\t\t\t\tentry = new ZipEntry(inBuffer);\r\n\t\t\tentry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);\r\n\r\n\t\t\tentry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);\r\n\r\n\t\t\tif (entry.header.extraLength) {\r\n\t\t\t\tentry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);\r\n\t\t\t}\r\n\r\n\t\t\tif (entry.header.commentLength)\r\n\t\t\t\tentry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);\r\n\r\n\t\t\tindex += entry.header.entryHeaderSize;\r\n\r\n\t\t\tentryList[i] = entry;\r\n\t\t\tentryTable[entry.entryName] = entry;\r\n\t\t}\r\n\t}\r\n\r\n\tfunction readMainHeader() {\r\n\t\tvar i = inBuffer.length - Utils.Constants.ENDHDR, // END header size\r\n\t\t\tmax = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length\r\n\t\t\tn = max,\r\n\t\t\tendStart = inBuffer.length,\r\n\t\t\tendOffset = -1, // Start offset of the END header\r\n\t\t\tcommentEnd = 0;\r\n\r\n\t\tfor (i; i >= n; i--) {\r\n\t\t\tif (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'\r\n\t\t\tif (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // \"PK\\005\\006\"\r\n\t\t\t\tendOffset = i;\r\n\t\t\t\tcommentEnd = i;\r\n\t\t\t\tendStart = i + Utils.Constants.ENDHDR;\r\n\t\t\t\t// We already found a regular signature, let's look just a bit further to check if there's any zip64 signature\r\n\t\t\t\tn = i - Utils.Constants.END64HDR;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {\r\n\t\t\t\t// Found a zip64 signature, let's continue reading the whole zip64 record\r\n\t\t\t\tn = max;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) {\r\n\t\t\t\t// Found the zip64 record, let's determine it's size\r\n\t\t\t\tendOffset = i;\r\n\t\t\t\tendStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!~endOffset)\r\n\t\t\tthrow new Error(Utils.Errors.INVALID_FORMAT);\r\n\r\n\t\tmainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));\r\n\t\tif (mainHeader.commentLength) {\r\n\t\t\t_comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);\r\n\t\t}\r\n\t\t// readEntries();\r\n\t}\r\n\r\n\treturn {\r\n\t\t/**\r\n\t\t * Returns an array of ZipEntry objects existent in the current opened archive\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tget entries() {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\treturn entryList;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * 
Archive comment\r\n\t\t * @return {String}\r\n\t\t */\r\n\t\tget comment() {\r\n\t\t\treturn _comment.toString();\r\n\t\t},\r\n\t\tset comment(val) {\r\n\t\t\t_comment = Utils.toBuffer(val);\r\n\t\t\tmainHeader.commentLength = _comment.length;\r\n\t\t},\r\n\r\n\t\tgetEntryCount: function() {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treturn mainHeader.diskEntries;\r\n\t\t\t}\r\n\r\n\t\t\treturn entryList.length;\r\n\t\t},\r\n\r\n\t\tforEach: function(callback) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\titerateEntries(callback);\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tentryList.forEach(callback);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns a reference to the entry with the given name or null if entry is inexistent\r\n\t\t *\r\n\t\t * @param entryName\r\n\t\t * @return ZipEntry\r\n\t\t */\r\n\t\tgetEntry: function (/*String*/entryName) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\treturn entryTable[entryName] || null;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds the given entry to the entry list\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t */\r\n\t\tsetEntry: function (/*ZipEntry*/entry) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tentryList.push(entry);\r\n\t\t\tentryTable[entry.entryName] = entry;\r\n\t\t\tmainHeader.totalEntries = entryList.length;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Removes the entry with the given name from the entry list.\r\n\t\t *\r\n\t\t * If the entry is a directory, then all nested files and directories will be removed\r\n\t\t * @param entryName\r\n\t\t */\r\n\t\tdeleteEntry: function (/*String*/entryName) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tvar entry = entryTable[entryName];\r\n\t\t\tif (entry && entry.isDirectory) {\r\n\t\t\t\tvar _self = this;\r\n\t\t\t\tthis.getEntryChildren(entry).forEach(function (child) {\r\n\t\t\t\t\tif (child.entryName !== entryName) {\r\n\t\t\t\t\t\t_self.deleteEntry(child.entryName)\r\n\t\t\t\t\t}\r\n\t\t\t\t})\r\n\t\t\t}\r\n\t\t\tentryList.splice(entryList.indexOf(entry), 1);\r\n\t\t\tdelete(entryTable[entryName]);\r\n\t\t\tmainHeader.totalEntries = entryList.length;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Iterates and returns all nested files and directories of the given entry\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tgetEntryChildren: function (/*ZipEntry*/entry) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entry.isDirectory) {\r\n\t\t\t\tvar list = [],\r\n\t\t\t\t\tname = entry.entryName,\r\n\t\t\t\t\tlen = name.length;\r\n\r\n\t\t\t\tentryList.forEach(function (zipEntry) {\r\n\t\t\t\t\tif (zipEntry.entryName.substr(0, len) === name) {\r\n\t\t\t\t\t\tlist.push(zipEntry);\r\n\t\t\t\t\t}\r\n\t\t\t\t});\r\n\t\t\t\treturn list;\r\n\t\t\t}\r\n\t\t\treturn []\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the zip file\r\n\t\t *\r\n\t\t * @return Buffer\r\n\t\t */\r\n\t\tcompressToBuffer: function () {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entryList.length > 1) {\r\n\t\t\t\tentryList.sort(function (a, b) {\r\n\t\t\t\t\tvar nameA = a.entryName.toLowerCase();\r\n\t\t\t\t\tvar nameB = b.entryName.toLowerCase();\r\n\t\t\t\t\tif (nameA < nameB) {\r\n\t\t\t\t\t\treturn -1\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (nameA > nameB) {\r\n\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t}\r\n\t\t\t\t\treturn 0;\r\n\t\t\t\t});\r\n\t\t\t}\r\n\r\n\t\t\tvar totalSize = 0,\r\n\t\t\t\tdataBlock = [],\r\n\t\t\t\tentryHeaders = [],\r\n\t\t\t\tdindex = 
0;\r\n\r\n\t\t\tmainHeader.size = 0;\r\n\t\t\tmainHeader.offset = 0;\r\n\r\n\t\t\tentryList.forEach(function (entry) {\r\n\t\t\t\t// compress data and set local and entry header accordingly. Reason why is called first\r\n\t\t\t\tvar compressedData = entry.getCompressedData();\r\n\t\t\t\t// data header\r\n\t\t\t\tentry.header.offset = dindex;\r\n\t\t\t\tvar dataHeader = entry.header.dataHeaderToBinary();\r\n\t\t\t\tvar entryNameLen = entry.rawEntryName.length;\r\n\t\t\t\tvar extra = entry.extra.toString();\r\n\t\t\t\tvar postHeader = Buffer.alloc(entryNameLen + extra.length);\r\n\t\t\t\tentry.rawEntryName.copy(postHeader, 0);\r\n\t\t\t\tpostHeader.fill(extra, entryNameLen);\r\n\r\n\t\t\t\tvar dataLength = dataHeader.length + postHeader.length + compressedData.length;\r\n\r\n\t\t\t\tdindex += dataLength;\r\n\r\n\t\t\t\tdataBlock.push(dataHeader);\r\n\t\t\t\tdataBlock.push(postHeader);\r\n\t\t\t\tdataBlock.push(compressedData);\r\n\r\n\t\t\t\tvar entryHeader = entry.packHeader();\r\n\t\t\t\tentryHeaders.push(entryHeader);\r\n\t\t\t\tmainHeader.size += entryHeader.length;\r\n\t\t\t\ttotalSize += (dataLength + entryHeader.length);\r\n\t\t\t});\r\n\r\n\t\t\ttotalSize += mainHeader.mainHeaderSize; // also includes zip file comment length\r\n\t\t\t// point to end of data and beginning of central directory first record\r\n\t\t\tmainHeader.offset = dindex;\r\n\r\n\t\t\tdindex = 0;\r\n\t\t\tvar outBuffer = Buffer.alloc(totalSize);\r\n\t\t\tdataBlock.forEach(function (content) {\r\n\t\t\t\tcontent.copy(outBuffer, dindex); // write data blocks\r\n\t\t\t\tdindex += content.length;\r\n\t\t\t});\r\n\t\t\tentryHeaders.forEach(function (content) {\r\n\t\t\t\tcontent.copy(outBuffer, dindex); // write central directory entries\r\n\t\t\t\tdindex += content.length;\r\n\t\t\t});\r\n\r\n\t\t\tvar mh = mainHeader.toBinary();\r\n\t\t\tif (_comment) {\r\n\t\t\t\tBuffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment\r\n\t\t\t}\r\n\r\n\t\t\tmh.copy(outBuffer, dindex); // write main header\r\n\r\n\t\t\treturn outBuffer\r\n\t\t},\r\n\r\n\t\ttoAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, /*Function*/onItemEnd) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entryList.length > 1) {\r\n\t\t\t\tentryList.sort(function (a, b) {\r\n\t\t\t\t\tvar nameA = a.entryName.toLowerCase();\r\n\t\t\t\t\tvar nameB = b.entryName.toLowerCase();\r\n\t\t\t\t\tif (nameA > nameB) {\r\n\t\t\t\t\t\treturn -1\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (nameA < nameB) {\r\n\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t}\r\n\t\t\t\t\treturn 0;\r\n\t\t\t\t});\r\n\t\t\t}\r\n\r\n\t\t\tvar totalSize = 0,\r\n\t\t\t\tdataBlock = [],\r\n\t\t\t\tentryHeaders = [],\r\n\t\t\t\tdindex = 0;\r\n\r\n\t\t\tmainHeader.size = 0;\r\n\t\t\tmainHeader.offset = 0;\r\n\r\n\t\t\tvar compress = function (entryList) {\r\n\t\t\t\tvar self = arguments.callee;\r\n\t\t\t\tif (entryList.length) {\r\n\t\t\t\t\tvar entry = entryList.pop();\r\n\t\t\t\t\tvar name = entry.entryName + entry.extra.toString();\r\n\t\t\t\t\tif (onItemStart) onItemStart(name);\r\n\t\t\t\t\tentry.getCompressedDataAsync(function (compressedData) {\r\n\t\t\t\t\t\tif (onItemEnd) onItemEnd(name);\r\n\r\n\t\t\t\t\t\tentry.header.offset = dindex;\r\n\t\t\t\t\t\t// data header\r\n\t\t\t\t\t\tvar dataHeader = entry.header.dataHeaderToBinary();\r\n\t\t\t\t\t\tvar postHeader;\r\n\t\t\t\t\t\ttry {\r\n\t\t\t\t\t\t\tpostHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+\r\n\t\t\t\t\t\t} 
catch(e){\r\n\t\t\t\t\t\t\tpostHeader = new Buffer(name); // use deprecated method if alloc fails...\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tvar dataLength = dataHeader.length + postHeader.length + compressedData.length;\r\n\r\n\t\t\t\t\t\tdindex += dataLength;\r\n\r\n\t\t\t\t\t\tdataBlock.push(dataHeader);\r\n\t\t\t\t\t\tdataBlock.push(postHeader);\r\n\t\t\t\t\t\tdataBlock.push(compressedData);\r\n\r\n\t\t\t\t\t\tvar entryHeader = entry.packHeader();\r\n\t\t\t\t\t\tentryHeaders.push(entryHeader);\r\n\t\t\t\t\t\tmainHeader.size += entryHeader.length;\r\n\t\t\t\t\t\ttotalSize += (dataLength + entryHeader.length);\r\n\r\n\t\t\t\t\t\tif (entryList.length) {\r\n\t\t\t\t\t\t\tself(entryList);\r\n\t\t\t\t\t\t} else {\r\n\r\n\r\n\t\t\t\t\t\t\ttotalSize += mainHeader.mainHeaderSize; // also includes zip file comment length\r\n\t\t\t\t\t\t\t// point to end of data and beginning of central directory first record\r\n\t\t\t\t\t\t\tmainHeader.offset = dindex;\r\n\r\n\t\t\t\t\t\t\tdindex = 0;\r\n\t\t\t\t\t\t\tvar outBuffer = Buffer.alloc(totalSize);\r\n\t\t\t\t\t\t\tdataBlock.forEach(function (content) {\r\n\t\t\t\t\t\t\t\tcontent.copy(outBuffer, dindex); // write data blocks\r\n\t\t\t\t\t\t\t\tdindex += content.length;\r\n\t\t\t\t\t\t\t});\r\n\t\t\t\t\t\t\tentryHeaders.forEach(function (content) {\r\n\t\t\t\t\t\t\t\tcontent.copy(outBuffer, dindex); // write central directory entries\r\n\t\t\t\t\t\t\t\tdindex += content.length;\r\n\t\t\t\t\t\t\t});\r\n\r\n\t\t\t\t\t\t\tvar mh = mainHeader.toBinary();\r\n\t\t\t\t\t\t\tif (_comment) {\r\n\t\t\t\t\t\t\t\t_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment\r\n\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\tmh.copy(outBuffer, dindex); // write main header\r\n\r\n\t\t\t\t\t\t\tonSuccess(outBuffer);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t});\r\n\t\t\t\t}\r\n\t\t\t};\r\n\r\n\t\t\tcompress(entryList);\r\n\t\t}\r\n\t}\r\n};\r\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook\n\nfunction addHook (state, kind, name, hook) {\n var orig = hook\n if (!state.registry[name]) {\n state.registry[name] = []\n }\n\n if (kind === 'before') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options))\n }\n }\n\n if (kind === 'after') {\n hook = function (method, options) {\n var result\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_\n return orig(result, options)\n })\n .then(function () {\n return result\n })\n }\n }\n\n if (kind === 'error') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options)\n })\n }\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig\n })\n}\n","module.exports = register\n\nfunction register (state, name, method, options) {\n if (typeof method !== 'function') {\n throw new Error('method for before hook must be a function')\n }\n\n if (!options) {\n options = {}\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options)\n }, method)()\n }\n\n return Promise.resolve()\n .then(function () {\n if (!state.registry[name]) {\n return method(options)\n }\n\n return (state.registry[name]).reduce(function (method, registered) {\n return registered.hook.bind(null, method, options)\n }, method)()\n })\n}\n","module.exports = removeHook\n\nfunction removeHook (state, name, method) {\n if (!state.registry[name]) {\n return\n }\n\n var index = state.registry[name]\n .map(function (registered) { return registered.orig })\n .indexOf(method)\n\n if (index === -1) {\n return\n }\n\n state.registry[name].splice(index, 1)\n}\n","'use strict';\nconst {\n\tV4MAPPED,\n\tADDRCONFIG,\n\tALL,\n\tpromises: {\n\t\tResolver: AsyncResolver\n\t},\n\tlookup: dnsLookup\n} = require('dns');\nconst {promisify} = require('util');\nconst os = require('os');\n\nconst kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection');\nconst kCacheableLookupInstance = Symbol('cacheableLookupInstance');\nconst kExpires = Symbol('expires');\n\nconst supportsALL = typeof ALL === 'number';\n\nconst verifyAgent = agent => {\n\tif (!(agent && typeof agent.createConnection === 'function')) {\n\t\tthrow new Error('Expected an Agent instance as the first argument');\n\t}\n};\n\nconst map4to6 = entries => {\n\tfor (const entry of entries) {\n\t\tif (entry.family === 6) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tentry.address = `::ffff:${entry.address}`;\n\t\tentry.family = 6;\n\t}\n};\n\nconst getIfaceInfo = () => {\n\tlet has4 = false;\n\tlet has6 = false;\n\n\tfor (const device of Object.values(os.networkInterfaces())) {\n\t\tfor (const iface of device) {\n\t\t\tif (iface.internal) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tif (iface.family === 'IPv6') {\n\t\t\t\thas6 = true;\n\t\t\t} else {\n\t\t\t\thas4 = true;\n\t\t\t}\n\n\t\t\tif (has4 && has6) {\n\t\t\t\treturn {has4, has6};\n\t\t\t}\n\t\t}\n\t}\n\n\treturn 
{has4, has6};\n};\n\nconst isIterable = map => {\n\treturn Symbol.iterator in map;\n};\n\nconst ttl = {ttl: true};\nconst all = {all: true};\n\nclass CacheableLookup {\n\tconstructor({\n\t\tcache = new Map(),\n\t\tmaxTtl = Infinity,\n\t\tfallbackDuration = 3600,\n\t\terrorTtl = 0.15,\n\t\tresolver = new AsyncResolver(),\n\t\tlookup = dnsLookup\n\t} = {}) {\n\t\tthis.maxTtl = maxTtl;\n\t\tthis.errorTtl = errorTtl;\n\n\t\tthis._cache = cache;\n\t\tthis._resolver = resolver;\n\t\tthis._dnsLookup = promisify(lookup);\n\n\t\tif (this._resolver instanceof AsyncResolver) {\n\t\t\tthis._resolve4 = this._resolver.resolve4.bind(this._resolver);\n\t\t\tthis._resolve6 = this._resolver.resolve6.bind(this._resolver);\n\t\t} else {\n\t\t\tthis._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver));\n\t\t\tthis._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver));\n\t\t}\n\n\t\tthis._iface = getIfaceInfo();\n\n\t\tthis._pending = {};\n\t\tthis._nextRemovalTime = false;\n\t\tthis._hostnamesToFallback = new Set();\n\n\t\tif (fallbackDuration < 1) {\n\t\t\tthis._fallback = false;\n\t\t} else {\n\t\t\tthis._fallback = true;\n\n\t\t\tconst interval = setInterval(() => {\n\t\t\t\tthis._hostnamesToFallback.clear();\n\t\t\t}, fallbackDuration * 1000);\n\n\t\t\t/* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */\n\t\t\tif (interval.unref) {\n\t\t\t\tinterval.unref();\n\t\t\t}\n\t\t}\n\n\t\tthis.lookup = this.lookup.bind(this);\n\t\tthis.lookupAsync = this.lookupAsync.bind(this);\n\t}\n\n\tset servers(servers) {\n\t\tthis.clear();\n\n\t\tthis._resolver.setServers(servers);\n\t}\n\n\tget servers() {\n\t\treturn this._resolver.getServers();\n\t}\n\n\tlookup(hostname, options, callback) {\n\t\tif (typeof options === 'function') {\n\t\t\tcallback = options;\n\t\t\toptions = {};\n\t\t} else if (typeof options === 'number') {\n\t\t\toptions = {\n\t\t\t\tfamily: options\n\t\t\t};\n\t\t}\n\n\t\tif (!callback) {\n\t\t\tthrow new Error('Callback must be a function.');\n\t\t}\n\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\tthis.lookupAsync(hostname, options).then(result => {\n\t\t\tif (options.all) {\n\t\t\t\tcallback(null, result);\n\t\t\t} else {\n\t\t\t\tcallback(null, result.address, result.family, result.expires, result.ttl);\n\t\t\t}\n\t\t}, callback);\n\t}\n\n\tasync lookupAsync(hostname, options = {}) {\n\t\tif (typeof options === 'number') {\n\t\t\toptions = {\n\t\t\t\tfamily: options\n\t\t\t};\n\t\t}\n\n\t\tlet cached = await this.query(hostname);\n\n\t\tif (options.family === 6) {\n\t\t\tconst filtered = cached.filter(entry => entry.family === 6);\n\n\t\t\tif (options.hints & V4MAPPED) {\n\t\t\t\tif ((supportsALL && options.hints & ALL) || filtered.length === 0) {\n\t\t\t\t\tmap4to6(cached);\n\t\t\t\t} else {\n\t\t\t\t\tcached = filtered;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tcached = filtered;\n\t\t\t}\n\t\t} else if (options.family === 4) {\n\t\t\tcached = cached.filter(entry => entry.family === 4);\n\t\t}\n\n\t\tif (options.hints & ADDRCONFIG) {\n\t\t\tconst {_iface} = this;\n\t\t\tcached = cached.filter(entry => entry.family === 6 ? 
_iface.has6 : _iface.has4);\n\t\t}\n\n\t\tif (cached.length === 0) {\n\t\t\tconst error = new Error(`cacheableLookup ENOTFOUND ${hostname}`);\n\t\t\terror.code = 'ENOTFOUND';\n\t\t\terror.hostname = hostname;\n\n\t\t\tthrow error;\n\t\t}\n\n\t\tif (options.all) {\n\t\t\treturn cached;\n\t\t}\n\n\t\treturn cached[0];\n\t}\n\n\tasync query(hostname) {\n\t\tlet cached = await this._cache.get(hostname);\n\n\t\tif (!cached) {\n\t\t\tconst pending = this._pending[hostname];\n\n\t\t\tif (pending) {\n\t\t\t\tcached = await pending;\n\t\t\t} else {\n\t\t\t\tconst newPromise = this.queryAndCache(hostname);\n\t\t\t\tthis._pending[hostname] = newPromise;\n\n\t\t\t\ttry {\n\t\t\t\t\tcached = await newPromise;\n\t\t\t\t} finally {\n\t\t\t\t\tdelete this._pending[hostname];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tcached = cached.map(entry => {\n\t\t\treturn {...entry};\n\t\t});\n\n\t\treturn cached;\n\t}\n\n\tasync _resolve(hostname) {\n\t\tconst wrap = async promise => {\n\t\t\ttry {\n\t\t\t\treturn await promise;\n\t\t\t} catch (error) {\n\t\t\t\tif (error.code === 'ENODATA' || error.code === 'ENOTFOUND') {\n\t\t\t\t\treturn [];\n\t\t\t\t}\n\n\t\t\t\tthrow error;\n\t\t\t}\n\t\t};\n\n\t\t// ANY is unsafe as it doesn't trigger new queries in the underlying server.\n\t\tconst [A, AAAA] = await Promise.all([\n\t\t\tthis._resolve4(hostname, ttl),\n\t\t\tthis._resolve6(hostname, ttl)\n\t\t].map(promise => wrap(promise)));\n\n\t\tlet aTtl = 0;\n\t\tlet aaaaTtl = 0;\n\t\tlet cacheTtl = 0;\n\n\t\tconst now = Date.now();\n\n\t\tfor (const entry of A) {\n\t\t\tentry.family = 4;\n\t\t\tentry.expires = now + (entry.ttl * 1000);\n\n\t\t\taTtl = Math.max(aTtl, entry.ttl);\n\t\t}\n\n\t\tfor (const entry of AAAA) {\n\t\t\tentry.family = 6;\n\t\t\tentry.expires = now + (entry.ttl * 1000);\n\n\t\t\taaaaTtl = Math.max(aaaaTtl, entry.ttl);\n\t\t}\n\n\t\tif (A.length > 0) {\n\t\t\tif (AAAA.length > 0) {\n\t\t\t\tcacheTtl = Math.min(aTtl, aaaaTtl);\n\t\t\t} else {\n\t\t\t\tcacheTtl = aTtl;\n\t\t\t}\n\t\t} else {\n\t\t\tcacheTtl = aaaaTtl;\n\t\t}\n\n\t\treturn {\n\t\t\tentries: [\n\t\t\t\t...A,\n\t\t\t\t...AAAA\n\t\t\t],\n\t\t\tcacheTtl\n\t\t};\n\t}\n\n\tasync _lookup(hostname) {\n\t\ttry {\n\t\t\tconst entries = await this._dnsLookup(hostname, {\n\t\t\t\tall: true\n\t\t\t});\n\n\t\t\treturn {\n\t\t\t\tentries,\n\t\t\t\tcacheTtl: 0\n\t\t\t};\n\t\t} catch (_) {\n\t\t\treturn {\n\t\t\t\tentries: [],\n\t\t\t\tcacheTtl: 0\n\t\t\t};\n\t\t}\n\t}\n\n\tasync _set(hostname, data, cacheTtl) {\n\t\tif (this.maxTtl > 0 && cacheTtl > 0) {\n\t\t\tcacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000;\n\t\t\tdata[kExpires] = Date.now() + cacheTtl;\n\n\t\t\ttry {\n\t\t\t\tawait this._cache.set(hostname, data, cacheTtl);\n\t\t\t} catch (error) {\n\t\t\t\tthis.lookupAsync = async () => {\n\t\t\t\t\tconst cacheError = new Error('Cache Error. 
Please recreate the CacheableLookup instance.');\n\t\t\t\t\tcacheError.cause = error;\n\n\t\t\t\t\tthrow cacheError;\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tif (isIterable(this._cache)) {\n\t\t\t\tthis._tick(cacheTtl);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync queryAndCache(hostname) {\n\t\tif (this._hostnamesToFallback.has(hostname)) {\n\t\t\treturn this._dnsLookup(hostname, all);\n\t\t}\n\n\t\tlet query = await this._resolve(hostname);\n\n\t\tif (query.entries.length === 0 && this._fallback) {\n\t\t\tquery = await this._lookup(hostname);\n\n\t\t\tif (query.entries.length !== 0) {\n\t\t\t\t// Use `dns.lookup(...)` for that particular hostname\n\t\t\t\tthis._hostnamesToFallback.add(hostname);\n\t\t\t}\n\t\t}\n\n\t\tconst cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl;\n\t\tawait this._set(hostname, query.entries, cacheTtl);\n\n\t\treturn query.entries;\n\t}\n\n\t_tick(ms) {\n\t\tconst nextRemovalTime = this._nextRemovalTime;\n\n\t\tif (!nextRemovalTime || ms < nextRemovalTime) {\n\t\t\tclearTimeout(this._removalTimeout);\n\n\t\t\tthis._nextRemovalTime = ms;\n\n\t\t\tthis._removalTimeout = setTimeout(() => {\n\t\t\t\tthis._nextRemovalTime = false;\n\n\t\t\t\tlet nextExpiry = Infinity;\n\n\t\t\t\tconst now = Date.now();\n\n\t\t\t\tfor (const [hostname, entries] of this._cache) {\n\t\t\t\t\tconst expires = entries[kExpires];\n\n\t\t\t\t\tif (now >= expires) {\n\t\t\t\t\t\tthis._cache.delete(hostname);\n\t\t\t\t\t} else if (expires < nextExpiry) {\n\t\t\t\t\t\tnextExpiry = expires;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (nextExpiry !== Infinity) {\n\t\t\t\t\tthis._tick(nextExpiry - now);\n\t\t\t\t}\n\t\t\t}, ms);\n\n\t\t\t/* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */\n\t\t\tif (this._removalTimeout.unref) {\n\t\t\t\tthis._removalTimeout.unref();\n\t\t\t}\n\t\t}\n\t}\n\n\tinstall(agent) {\n\t\tverifyAgent(agent);\n\n\t\tif (kCacheableLookupCreateConnection in agent) {\n\t\t\tthrow new Error('CacheableLookup has been already installed');\n\t\t}\n\n\t\tagent[kCacheableLookupCreateConnection] = agent.createConnection;\n\t\tagent[kCacheableLookupInstance] = this;\n\n\t\tagent.createConnection = (options, callback) => {\n\t\t\tif (!('lookup' in options)) {\n\t\t\t\toptions.lookup = this.lookup;\n\t\t\t}\n\n\t\t\treturn agent[kCacheableLookupCreateConnection](options, callback);\n\t\t};\n\t}\n\n\tuninstall(agent) {\n\t\tverifyAgent(agent);\n\n\t\tif (agent[kCacheableLookupCreateConnection]) {\n\t\t\tif (agent[kCacheableLookupInstance] !== this) {\n\t\t\t\tthrow new Error('The agent is not owned by this CacheableLookup instance');\n\t\t\t}\n\n\t\t\tagent.createConnection = agent[kCacheableLookupCreateConnection];\n\n\t\t\tdelete agent[kCacheableLookupCreateConnection];\n\t\t\tdelete agent[kCacheableLookupInstance];\n\t\t}\n\t}\n\n\tupdateInterfaceInfo() {\n\t\tconst {_iface} = this;\n\n\t\tthis._iface = getIfaceInfo();\n\n\t\tif ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) {\n\t\t\tthis._cache.clear();\n\t\t}\n\t}\n\n\tclear(hostname) {\n\t\tif (hostname) {\n\t\t\tthis._cache.delete(hostname);\n\t\t\treturn;\n\t\t}\n\n\t\tthis._cache.clear();\n\t}\n}\n\nmodule.exports = CacheableLookup;\nmodule.exports.default = CacheableLookup;\n","'use strict';\nconst {PassThrough: PassThroughStream} = require('stream');\n\nmodule.exports = options => {\n\toptions = {...options};\n\n\tconst {array} = options;\n\tlet {encoding} = options;\n\tconst isBuffer = encoding === 'buffer';\n\tlet objectMode = false;\n\n\tif (array) 
{\n\t\tobjectMode = !(encoding || isBuffer);\n\t} else {\n\t\tencoding = encoding || 'utf8';\n\t}\n\n\tif (isBuffer) {\n\t\tencoding = null;\n\t}\n\n\tconst stream = new PassThroughStream({objectMode});\n\n\tif (encoding) {\n\t\tstream.setEncoding(encoding);\n\t}\n\n\tlet length = 0;\n\tconst chunks = [];\n\n\tstream.on('data', chunk => {\n\t\tchunks.push(chunk);\n\n\t\tif (objectMode) {\n\t\t\tlength = chunks.length;\n\t\t} else {\n\t\t\tlength += chunk.length;\n\t\t}\n\t});\n\n\tstream.getBufferedValue = () => {\n\t\tif (array) {\n\t\t\treturn chunks;\n\t\t}\n\n\t\treturn isBuffer ? Buffer.concat(chunks, length) : chunks.join('');\n\t};\n\n\tstream.getBufferedLength = () => length;\n\n\treturn stream;\n};\n","'use strict';\nconst {constants: BufferConstants} = require('buffer');\nconst pump = require('pump');\nconst bufferStream = require('./buffer-stream');\n\nclass MaxBufferError extends Error {\n\tconstructor() {\n\t\tsuper('maxBuffer exceeded');\n\t\tthis.name = 'MaxBufferError';\n\t}\n}\n\nasync function getStream(inputStream, options) {\n\tif (!inputStream) {\n\t\treturn Promise.reject(new Error('Expected a stream'));\n\t}\n\n\toptions = {\n\t\tmaxBuffer: Infinity,\n\t\t...options\n\t};\n\n\tconst {maxBuffer} = options;\n\n\tlet stream;\n\tawait new Promise((resolve, reject) => {\n\t\tconst rejectPromise = error => {\n\t\t\t// Don't retrieve an oversized buffer.\n\t\t\tif (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {\n\t\t\t\terror.bufferedData = stream.getBufferedValue();\n\t\t\t}\n\n\t\t\treject(error);\n\t\t};\n\n\t\tstream = pump(inputStream, bufferStream(options), error => {\n\t\t\tif (error) {\n\t\t\t\trejectPromise(error);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tresolve();\n\t\t});\n\n\t\tstream.on('data', () => {\n\t\t\tif (stream.getBufferedLength() > maxBuffer) {\n\t\t\t\trejectPromise(new MaxBufferError());\n\t\t\t}\n\t\t});\n\t});\n\n\treturn stream.getBufferedValue();\n}\n\nmodule.exports = getStream;\n// TODO: Remove this for the next major release\nmodule.exports.default = getStream;\nmodule.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});\nmodule.exports.array = (stream, options) => getStream(stream, {...options, array: true});\nmodule.exports.MaxBufferError = MaxBufferError;\n","'use strict';\n\nconst EventEmitter = require('events');\nconst urlLib = require('url');\nconst normalizeUrl = require('normalize-url');\nconst getStream = require('get-stream');\nconst CachePolicy = require('http-cache-semantics');\nconst Response = require('responselike');\nconst lowercaseKeys = require('lowercase-keys');\nconst cloneResponse = require('clone-response');\nconst Keyv = require('keyv');\n\nclass CacheableRequest {\n\tconstructor(request, cacheAdapter) {\n\t\tif (typeof request !== 'function') {\n\t\t\tthrow new TypeError('Parameter `request` must be a function');\n\t\t}\n\n\t\tthis.cache = new Keyv({\n\t\t\turi: typeof cacheAdapter === 'string' && cacheAdapter,\n\t\t\tstore: typeof cacheAdapter !== 'string' && cacheAdapter,\n\t\t\tnamespace: 'cacheable-request'\n\t\t});\n\n\t\treturn this.createCacheableRequest(request);\n\t}\n\n\tcreateCacheableRequest(request) {\n\t\treturn (opts, cb) => {\n\t\t\tlet url;\n\t\t\tif (typeof opts === 'string') {\n\t\t\t\turl = normalizeUrlObject(urlLib.parse(opts));\n\t\t\t\topts = {};\n\t\t\t} else if (opts instanceof urlLib.URL) {\n\t\t\t\turl = normalizeUrlObject(urlLib.parse(opts.toString()));\n\t\t\t\topts = {};\n\t\t\t} else {\n\t\t\t\tconst [pathname, ...searchParts] = 
(opts.path || '').split('?');\n\t\t\t\tconst search = searchParts.length > 0 ?\n\t\t\t\t\t`?${searchParts.join('?')}` :\n\t\t\t\t\t'';\n\t\t\t\turl = normalizeUrlObject({ ...opts, pathname, search });\n\t\t\t}\n\n\t\t\topts = {\n\t\t\t\theaders: {},\n\t\t\t\tmethod: 'GET',\n\t\t\t\tcache: true,\n\t\t\t\tstrictTtl: false,\n\t\t\t\tautomaticFailover: false,\n\t\t\t\t...opts,\n\t\t\t\t...urlObjectToRequestOptions(url)\n\t\t\t};\n\t\t\topts.headers = lowercaseKeys(opts.headers);\n\n\t\t\tconst ee = new EventEmitter();\n\t\t\tconst normalizedUrlString = normalizeUrl(\n\t\t\t\turlLib.format(url),\n\t\t\t\t{\n\t\t\t\t\tstripWWW: false,\n\t\t\t\t\tremoveTrailingSlash: false,\n\t\t\t\t\tstripAuthentication: false\n\t\t\t\t}\n\t\t\t);\n\t\t\tconst key = `${opts.method}:${normalizedUrlString}`;\n\t\t\tlet revalidate = false;\n\t\t\tlet madeRequest = false;\n\n\t\t\tconst makeRequest = opts => {\n\t\t\t\tmadeRequest = true;\n\t\t\t\tlet requestErrored = false;\n\t\t\t\tlet requestErrorCallback;\n\n\t\t\t\tconst requestErrorPromise = new Promise(resolve => {\n\t\t\t\t\trequestErrorCallback = () => {\n\t\t\t\t\t\tif (!requestErrored) {\n\t\t\t\t\t\t\trequestErrored = true;\n\t\t\t\t\t\t\tresolve();\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\t\t\t\t});\n\n\t\t\t\tconst handler = response => {\n\t\t\t\t\tif (revalidate && !opts.forceRefresh) {\n\t\t\t\t\t\tresponse.status = response.statusCode;\n\t\t\t\t\t\tconst revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response);\n\t\t\t\t\t\tif (!revalidatedPolicy.modified) {\n\t\t\t\t\t\t\tconst headers = revalidatedPolicy.policy.responseHeaders();\n\t\t\t\t\t\t\tresponse = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url);\n\t\t\t\t\t\t\tresponse.cachePolicy = revalidatedPolicy.policy;\n\t\t\t\t\t\t\tresponse.fromCache = true;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (!response.fromCache) {\n\t\t\t\t\t\tresponse.cachePolicy = new CachePolicy(opts, response, opts);\n\t\t\t\t\t\tresponse.fromCache = false;\n\t\t\t\t\t}\n\n\t\t\t\t\tlet clonedResponse;\n\t\t\t\t\tif (opts.cache && response.cachePolicy.storable()) {\n\t\t\t\t\t\tclonedResponse = cloneResponse(response);\n\n\t\t\t\t\t\t(async () => {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tconst bodyPromise = getStream.buffer(response);\n\n\t\t\t\t\t\t\t\tawait Promise.race([\n\t\t\t\t\t\t\t\t\trequestErrorPromise,\n\t\t\t\t\t\t\t\t\tnew Promise(resolve => response.once('end', resolve))\n\t\t\t\t\t\t\t\t]);\n\n\t\t\t\t\t\t\t\tif (requestErrored) {\n\t\t\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tconst body = await bodyPromise;\n\n\t\t\t\t\t\t\t\tconst value = {\n\t\t\t\t\t\t\t\t\tcachePolicy: response.cachePolicy.toObject(),\n\t\t\t\t\t\t\t\t\turl: response.url,\n\t\t\t\t\t\t\t\t\tstatusCode: response.fromCache ? revalidate.statusCode : response.statusCode,\n\t\t\t\t\t\t\t\t\tbody\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t\tlet ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined;\n\t\t\t\t\t\t\t\tif (opts.maxTtl) {\n\t\t\t\t\t\t\t\t\tttl = ttl ? 
Math.min(ttl, opts.maxTtl) : opts.maxTtl;\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tawait this.cache.set(key, value, ttl);\n\t\t\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})();\n\t\t\t\t\t} else if (opts.cache && revalidate) {\n\t\t\t\t\t\t(async () => {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tawait this.cache.delete(key);\n\t\t\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})();\n\t\t\t\t\t}\n\n\t\t\t\t\tee.emit('response', clonedResponse || response);\n\t\t\t\t\tif (typeof cb === 'function') {\n\t\t\t\t\t\tcb(clonedResponse || response);\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\ttry {\n\t\t\t\t\tconst req = request(opts, handler);\n\t\t\t\t\treq.once('error', requestErrorCallback);\n\t\t\t\t\treq.once('abort', requestErrorCallback);\n\t\t\t\t\tee.emit('request', req);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tee.emit('error', new CacheableRequest.RequestError(error));\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t(async () => {\n\t\t\t\tconst get = async opts => {\n\t\t\t\t\tawait Promise.resolve();\n\n\t\t\t\t\tconst cacheEntry = opts.cache ? await this.cache.get(key) : undefined;\n\t\t\t\t\tif (typeof cacheEntry === 'undefined') {\n\t\t\t\t\t\treturn makeRequest(opts);\n\t\t\t\t\t}\n\n\t\t\t\t\tconst policy = CachePolicy.fromObject(cacheEntry.cachePolicy);\n\t\t\t\t\tif (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) {\n\t\t\t\t\t\tconst headers = policy.responseHeaders();\n\t\t\t\t\t\tconst response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url);\n\t\t\t\t\t\tresponse.cachePolicy = policy;\n\t\t\t\t\t\tresponse.fromCache = true;\n\n\t\t\t\t\t\tee.emit('response', response);\n\t\t\t\t\t\tif (typeof cb === 'function') {\n\t\t\t\t\t\t\tcb(response);\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\trevalidate = cacheEntry;\n\t\t\t\t\t\topts.headers = policy.revalidationHeaders(opts);\n\t\t\t\t\t\tmakeRequest(opts);\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\tconst errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\tthis.cache.once('error', errorHandler);\n\t\t\t\tee.on('response', () => this.cache.removeListener('error', errorHandler));\n\n\t\t\t\ttry {\n\t\t\t\t\tawait get(opts);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (opts.automaticFailover && !madeRequest) {\n\t\t\t\t\t\tmakeRequest(opts);\n\t\t\t\t\t}\n\n\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t}\n\t\t\t})();\n\n\t\t\treturn ee;\n\t\t};\n\t}\n}\n\nfunction urlObjectToRequestOptions(url) {\n\tconst options = { ...url };\n\toptions.path = `${url.pathname || '/'}${url.search || ''}`;\n\tdelete options.pathname;\n\tdelete options.search;\n\treturn options;\n}\n\nfunction normalizeUrlObject(url) {\n\t// If url was parsed by url.parse or new URL:\n\t// - hostname will be set\n\t// - host will be hostname[:port]\n\t// - port will be set if it was explicit in the parsed string\n\t// Otherwise, url was from request options:\n\t// - hostname or host may be set\n\t// - host shall not have port encoded\n\treturn {\n\t\tprotocol: url.protocol,\n\t\tauth: url.auth,\n\t\thostname: url.hostname || url.host || 'localhost',\n\t\tport: url.port,\n\t\tpathname: url.pathname,\n\t\tsearch: url.search\n\t};\n}\n\nCacheableRequest.RequestError = class extends Error {\n\tconstructor(error) {\n\t\tsuper(error.message);\n\t\tthis.name = 'RequestError';\n\t\tObject.assign(this, 
error);\n\t}\n};\n\nCacheableRequest.CacheError = class extends Error {\n\tconstructor(error) {\n\t\tsuper(error.message);\n\t\tthis.name = 'CacheError';\n\t\tObject.assign(this, error);\n\t}\n};\n\nmodule.exports = CacheableRequest;\n","'use strict';\n\nconst PassThrough = require('stream').PassThrough;\nconst mimicResponse = require('mimic-response');\n\nconst cloneResponse = response => {\n\tif (!(response && response.pipe)) {\n\t\tthrow new TypeError('Parameter `response` must be a response stream.');\n\t}\n\n\tconst clone = new PassThrough();\n\tmimicResponse(response, clone);\n\n\treturn response.pipe(clone);\n};\n\nmodule.exports = cloneResponse;\n","'use strict';\nconst {Transform, PassThrough} = require('stream');\nconst zlib = require('zlib');\nconst mimicResponse = require('mimic-response');\n\nmodule.exports = response => {\n\tconst contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();\n\n\tif (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {\n\t\treturn response;\n\t}\n\n\t// TODO: Remove this when targeting Node.js 12.\n\tconst isBrotli = contentEncoding === 'br';\n\tif (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {\n\t\tresponse.destroy(new Error('Brotli is not supported on Node.js < 12'));\n\t\treturn response;\n\t}\n\n\tlet isEmpty = true;\n\n\tconst checker = new Transform({\n\t\ttransform(data, _encoding, callback) {\n\t\t\tisEmpty = false;\n\n\t\t\tcallback(null, data);\n\t\t},\n\n\t\tflush(callback) {\n\t\t\tcallback();\n\t\t}\n\t});\n\n\tconst finalStream = new PassThrough({\n\t\tautoDestroy: false,\n\t\tdestroy(error, callback) {\n\t\t\tresponse.destroy();\n\n\t\t\tcallback(error);\n\t\t}\n\t});\n\n\tconst decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();\n\n\tdecompressStream.once('error', error => {\n\t\tif (isEmpty && !response.readable) {\n\t\t\tfinalStream.end();\n\t\t\treturn;\n\t\t}\n\n\t\tfinalStream.destroy(error);\n\t});\n\n\tmimicResponse(response, finalStream);\n\tresponse.pipe(checker).pipe(decompressStream).pipe(finalStream);\n\n\treturn finalStream;\n};\n","'use strict';\n\n// We define these manually to ensure they're always copied\n// even if they would move up the prototype chain\n// https://nodejs.org/api/http.html#http_class_http_incomingmessage\nconst knownProperties = [\n\t'aborted',\n\t'complete',\n\t'headers',\n\t'httpVersion',\n\t'httpVersionMinor',\n\t'httpVersionMajor',\n\t'method',\n\t'rawHeaders',\n\t'rawTrailers',\n\t'setTimeout',\n\t'socket',\n\t'statusCode',\n\t'statusMessage',\n\t'trailers',\n\t'url'\n];\n\nmodule.exports = (fromStream, toStream) => {\n\tif (toStream._readableState.autoDestroy) {\n\t\tthrow new Error('The second stream must have the `autoDestroy` option set to `false`');\n\t}\n\n\tconst fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));\n\n\tconst properties = {};\n\n\tfor (const property of fromProperties) {\n\t\t// Don't overwrite existing properties.\n\t\tif (property in toStream) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tproperties[property] = {\n\t\t\tget() {\n\t\t\t\tconst value = fromStream[property];\n\t\t\t\tconst isFunction = typeof value === 'function';\n\n\t\t\t\treturn isFunction ? 
value.bind(fromStream) : value;\n\t\t\t},\n\t\t\tset(value) {\n\t\t\t\tfromStream[property] = value;\n\t\t\t},\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false\n\t\t};\n\t}\n\n\tObject.defineProperties(toStream, properties);\n\n\tfromStream.once('aborted', () => {\n\t\ttoStream.destroy();\n\n\t\ttoStream.emit('aborted');\n\t});\n\n\tfromStream.once('close', () => {\n\t\tif (fromStream.complete) {\n\t\t\tif (toStream.readable) {\n\t\t\t\ttoStream.once('end', () => {\n\t\t\t\t\ttoStream.emit('close');\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\ttoStream.emit('close');\n\t\t\t}\n\t\t} else {\n\t\t\ttoStream.emit('close');\n\t\t}\n\t});\n\n\treturn toStream;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tls_1 = require(\"tls\");\nconst deferToConnect = (socket, fn) => {\n let listeners;\n if (typeof fn === 'function') {\n const connect = fn;\n listeners = { connect };\n }\n else {\n listeners = fn;\n }\n const hasConnectListener = typeof listeners.connect === 'function';\n const hasSecureConnectListener = typeof listeners.secureConnect === 'function';\n const hasCloseListener = typeof listeners.close === 'function';\n const onConnect = () => {\n if (hasConnectListener) {\n listeners.connect();\n }\n if (socket instanceof tls_1.TLSSocket && hasSecureConnectListener) {\n if (socket.authorized) {\n listeners.secureConnect();\n }\n else if (!socket.authorizationError) {\n socket.once('secureConnect', listeners.secureConnect);\n }\n }\n if (hasCloseListener) {\n socket.once('close', listeners.close);\n }\n };\n if (socket.writable && !socket.connecting) {\n onConnect();\n }\n else if (socket.connecting) {\n socket.once('connect', onConnect);\n }\n else if (socket.destroyed && hasCloseListener) {\n listeners.close(socket._hadError);\n }\n};\nexports.default = deferToConnect;\n// For CommonJS default export support\nmodule.exports = deferToConnect;\nmodule.exports.default = deferToConnect;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","'use strict';\n\nconst stringify = require('./lib/stringify');\nconst compile = require('./lib/compile');\nconst expand = require('./lib/expand');\nconst parse = require('./lib/parse');\n\n/**\n * Expand the given pattern or create a regex-compatible string.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']\n * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {String}\n * @api public\n */\n\nconst braces = (input, options = {}) => {\n let output = [];\n\n if (Array.isArray(input)) {\n for (let pattern of input) {\n let result = braces.create(pattern, options);\n if (Array.isArray(result)) {\n output.push(...result);\n } else {\n output.push(result);\n }\n }\n } else {\n output = [].concat(braces.create(input, options));\n }\n\n if (options && options.expand === true && options.nodupes === true) {\n output = [...new Set(output)];\n }\n return output;\n};\n\n/**\n * Parse the given `str` with the given `options`.\n *\n * ```js\n * // braces.parse(pattern, [, options]);\n * const ast = braces.parse('a/{b,c}/d');\n * console.log(ast);\n * ```\n * @param {String} pattern Brace pattern to parse\n * @param {Object} options\n * @return {Object} Returns an AST\n * @api public\n */\n\nbraces.parse = (input, options = {}) => parse(input, options);\n\n/**\n * Creates a braces string from an AST, or an AST node.\n *\n * ```js\n * const braces = require('braces');\n * let ast = braces.parse('foo/{a,b}/bar');\n * console.log(stringify(ast.nodes[2])); //=> '{a,b}'\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api 
public\n */\n\nbraces.stringify = (input, options = {}) => {\n if (typeof input === 'string') {\n return stringify(braces.parse(input, options), options);\n }\n return stringify(input, options);\n};\n\n/**\n * Compiles a brace pattern into a regex-compatible, optimized string.\n * This method is called by the main [braces](#braces) function by default.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.compile('a/{b,c}/d'));\n * //=> ['a/(b|c)/d']\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.compile = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n return compile(input, options);\n};\n\n/**\n * Expands a brace pattern into an array. This method is called by the\n * main [braces](#braces) function when `options.expand` is true. Before\n * using this method it's recommended that you read the [performance notes](#performance))\n * and advantages of using [.compile](#compile) instead.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.expand('a/{b,c}/d'));\n * //=> ['a/b/d', 'a/c/d'];\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.expand = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n\n let result = expand(input, options);\n\n // filter out empty strings if specified\n if (options.noempty === true) {\n result = result.filter(Boolean);\n }\n\n // filter out duplicates if specified\n if (options.nodupes === true) {\n result = [...new Set(result)];\n }\n\n return result;\n};\n\n/**\n * Processes a brace pattern and returns either an expanded array\n * (if `options.expand` is true), a highly optimized regex-compatible string.\n * This method is called by the main [braces](#braces) function.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))\n * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.create = (input, options = {}) => {\n if (input === '' || input.length < 3) {\n return [input];\n }\n\n return options.expand !== true\n ? braces.compile(input, options)\n : braces.expand(input, options);\n};\n\n/**\n * Expose \"braces\"\n */\n\nmodule.exports = braces;\n","'use strict';\n\nconst fill = require('fill-range');\nconst utils = require('./utils');\n\nconst compile = (ast, options = {}) => {\n let walk = (node, parent = {}) => {\n let invalidBlock = utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let invalid = invalidBlock === true || invalidNode === true;\n let prefix = options.escapeInvalid === true ? '\\\\' : '';\n let output = '';\n\n if (node.isOpen === true) {\n return prefix + node.value;\n }\n if (node.isClose === true) {\n return prefix + node.value;\n }\n\n if (node.type === 'open') {\n return invalid ? (prefix + node.value) : '(';\n }\n\n if (node.type === 'close') {\n return invalid ? (prefix + node.value) : ')';\n }\n\n if (node.type === 'comma') {\n return node.prev.type === 'comma' ? '' : (invalid ? 
node.value : '|');\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n let range = fill(...args, { ...options, wrap: false, toRegex: true });\n\n if (range.length !== 0) {\n return args.length > 1 && range.length > 1 ? `(${range})` : range;\n }\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += walk(child, node);\n }\n }\n return output;\n };\n\n return walk(ast);\n};\n\nmodule.exports = compile;\n","'use strict';\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n\n // Digits\n CHAR_0: '0', /* 0 */\n CHAR_9: '9', /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 'A', /* A */\n CHAR_LOWERCASE_A: 'a', /* a */\n CHAR_UPPERCASE_Z: 'Z', /* Z */\n CHAR_LOWERCASE_Z: 'z', /* z */\n\n CHAR_LEFT_PARENTHESES: '(', /* ( */\n CHAR_RIGHT_PARENTHESES: ')', /* ) */\n\n CHAR_ASTERISK: '*', /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: '&', /* & */\n CHAR_AT: '@', /* @ */\n CHAR_BACKSLASH: '\\\\', /* \\ */\n CHAR_BACKTICK: '`', /* ` */\n CHAR_CARRIAGE_RETURN: '\\r', /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */\n CHAR_COLON: ':', /* : */\n CHAR_COMMA: ',', /* , */\n CHAR_DOLLAR: '$', /* . */\n CHAR_DOT: '.', /* . */\n CHAR_DOUBLE_QUOTE: '\"', /* \" */\n CHAR_EQUAL: '=', /* = */\n CHAR_EXCLAMATION_MARK: '!', /* ! */\n CHAR_FORM_FEED: '\\f', /* \\f */\n CHAR_FORWARD_SLASH: '/', /* / */\n CHAR_HASH: '#', /* # */\n CHAR_HYPHEN_MINUS: '-', /* - */\n CHAR_LEFT_ANGLE_BRACKET: '<', /* < */\n CHAR_LEFT_CURLY_BRACE: '{', /* { */\n CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */\n CHAR_LINE_FEED: '\\n', /* \\n */\n CHAR_NO_BREAK_SPACE: '\\u00A0', /* \\u00A0 */\n CHAR_PERCENT: '%', /* % */\n CHAR_PLUS: '+', /* + */\n CHAR_QUESTION_MARK: '?', /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */\n CHAR_RIGHT_CURLY_BRACE: '}', /* } */\n CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */\n CHAR_SEMICOLON: ';', /* ; */\n CHAR_SINGLE_QUOTE: '\\'', /* ' */\n CHAR_SPACE: ' ', /* */\n CHAR_TAB: '\\t', /* \\t */\n CHAR_UNDERSCORE: '_', /* _ */\n CHAR_VERTICAL_LINE: '|', /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\\uFEFF' /* \\uFEFF */\n};\n","'use strict';\n\nconst fill = require('fill-range');\nconst stringify = require('./stringify');\nconst utils = require('./utils');\n\nconst append = (queue = '', stash = '', enclose = false) => {\n let result = [];\n\n queue = [].concat(queue);\n stash = [].concat(stash);\n\n if (!stash.length) return queue;\n if (!queue.length) {\n return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;\n }\n\n for (let item of queue) {\n if (Array.isArray(item)) {\n for (let value of item) {\n result.push(append(value, stash, enclose));\n }\n } else {\n for (let ele of stash) {\n if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;\n result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));\n }\n }\n }\n return utils.flatten(result);\n};\n\nconst expand = (ast, options = {}) => {\n let rangeLimit = options.rangeLimit === void 0 ? 
1000 : options.rangeLimit;\n\n let walk = (node, parent = {}) => {\n node.queue = [];\n\n let p = parent;\n let q = parent.queue;\n\n while (p.type !== 'brace' && p.type !== 'root' && p.parent) {\n p = p.parent;\n q = p.queue;\n }\n\n if (node.invalid || node.dollar) {\n q.push(append(q.pop(), stringify(node, options)));\n return;\n }\n\n if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {\n q.push(append(q.pop(), ['{}']));\n return;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n\n if (utils.exceedsLimit(...args, options.step, rangeLimit)) {\n throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');\n }\n\n let range = fill(...args, options);\n if (range.length === 0) {\n range = stringify(node, options);\n }\n\n q.push(append(q.pop(), range));\n node.nodes = [];\n return;\n }\n\n let enclose = utils.encloseBrace(node);\n let queue = node.queue;\n let block = node;\n\n while (block.type !== 'brace' && block.type !== 'root' && block.parent) {\n block = block.parent;\n queue = block.queue;\n }\n\n for (let i = 0; i < node.nodes.length; i++) {\n let child = node.nodes[i];\n\n if (child.type === 'comma' && node.type === 'brace') {\n if (i === 1) queue.push('');\n queue.push('');\n continue;\n }\n\n if (child.type === 'close') {\n q.push(append(q.pop(), queue, enclose));\n continue;\n }\n\n if (child.value && child.type !== 'open') {\n queue.push(append(queue.pop(), child.value));\n continue;\n }\n\n if (child.nodes) {\n walk(child, node);\n }\n }\n\n return queue;\n };\n\n return utils.flatten(walk(ast));\n};\n\nmodule.exports = expand;\n","'use strict';\n\nconst stringify = require('./stringify');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n CHAR_BACKSLASH, /* \\ */\n CHAR_BACKTICK, /* ` */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_RIGHT_SQUARE_BRACKET, /* ] */\n CHAR_DOUBLE_QUOTE, /* \" */\n CHAR_SINGLE_QUOTE, /* ' */\n CHAR_NO_BREAK_SPACE,\n CHAR_ZERO_WIDTH_NOBREAK_SPACE\n} = require('./constants');\n\n/**\n * parse\n */\n\nconst parse = (input, options = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n let opts = options || {};\n let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n if (input.length > max) {\n throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);\n }\n\n let ast = { type: 'root', input, nodes: [] };\n let stack = [ast];\n let block = ast;\n let prev = ast;\n let brackets = 0;\n let length = input.length;\n let index = 0;\n let depth = 0;\n let value;\n let memo = {};\n\n /**\n * Helpers\n */\n\n const advance = () => input[index++];\n const push = node => {\n if (node.type === 'text' && prev.type === 'dot') {\n prev.type = 'text';\n }\n\n if (prev && prev.type === 'text' && node.type === 'text') {\n prev.value += node.value;\n return;\n }\n\n block.nodes.push(node);\n node.parent = block;\n node.prev = prev;\n prev = node;\n return node;\n };\n\n push({ type: 'bos' });\n\n while (index < length) {\n block = stack[stack.length - 1];\n value = advance();\n\n /**\n * Invalid chars\n */\n\n if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {\n continue;\n }\n\n /**\n * Escaped chars\n */\n\n if (value === CHAR_BACKSLASH) {\n push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });\n continue;\n }\n\n /**\n * Right square bracket (literal): ']'\n */\n\n if (value === CHAR_RIGHT_SQUARE_BRACKET) {\n push({ type: 'text', value: '\\\\' + value });\n continue;\n }\n\n /**\n * Left square bracket: '['\n */\n\n if (value === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n\n let closed = true;\n let next;\n\n while (index < length && (next = advance())) {\n value += next;\n\n if (next === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n continue;\n }\n\n if (next === CHAR_BACKSLASH) {\n value += advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n brackets--;\n\n if (brackets === 0) {\n break;\n }\n }\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === CHAR_LEFT_PARENTHESES) {\n block = push({ type: 'paren', nodes: [] });\n stack.push(block);\n push({ type: 'text', value });\n continue;\n }\n\n if (value === CHAR_RIGHT_PARENTHESES) {\n if (block.type !== 'paren') {\n push({ type: 'text', value });\n continue;\n }\n block = stack.pop();\n push({ type: 'text', value });\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Quotes: '|\"|`\n */\n\n if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {\n let open = value;\n let next;\n\n if (options.keepQuotes !== true) {\n value = '';\n }\n\n while (index < length && (next = advance())) {\n if (next === CHAR_BACKSLASH) {\n value += next + advance();\n continue;\n }\n\n if (next === open) {\n if (options.keepQuotes === true) value += next;\n break;\n }\n\n value += next;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Left curly brace: '{'\n */\n\n if (value === CHAR_LEFT_CURLY_BRACE) {\n depth++;\n\n let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;\n let brace = {\n type: 'brace',\n open: true,\n close: false,\n dollar,\n depth,\n commas: 0,\n ranges: 0,\n nodes: []\n };\n\n block = push(brace);\n stack.push(block);\n push({ type: 'open', value });\n continue;\n }\n\n /**\n * Right curly brace: '}'\n */\n\n if (value === CHAR_RIGHT_CURLY_BRACE) {\n if (block.type !== 'brace') {\n push({ type: 'text', value });\n continue;\n }\n\n let type = 'close';\n block = stack.pop();\n block.close = true;\n\n push({ type, value });\n depth--;\n\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Comma: ','\n */\n\n if 
(value === CHAR_COMMA && depth > 0) {\n if (block.ranges > 0) {\n block.ranges = 0;\n let open = block.nodes.shift();\n block.nodes = [open, { type: 'text', value: stringify(block) }];\n }\n\n push({ type: 'comma', value });\n block.commas++;\n continue;\n }\n\n /**\n * Dot: '.'\n */\n\n if (value === CHAR_DOT && depth > 0 && block.commas === 0) {\n let siblings = block.nodes;\n\n if (depth === 0 || siblings.length === 0) {\n push({ type: 'text', value });\n continue;\n }\n\n if (prev.type === 'dot') {\n block.range = [];\n prev.value += value;\n prev.type = 'range';\n\n if (block.nodes.length !== 3 && block.nodes.length !== 5) {\n block.invalid = true;\n block.ranges = 0;\n prev.type = 'text';\n continue;\n }\n\n block.ranges++;\n block.args = [];\n continue;\n }\n\n if (prev.type === 'range') {\n siblings.pop();\n\n let before = siblings[siblings.length - 1];\n before.value += prev.value + value;\n prev = before;\n block.ranges--;\n continue;\n }\n\n push({ type: 'dot', value });\n continue;\n }\n\n /**\n * Text\n */\n\n push({ type: 'text', value });\n }\n\n // Mark imbalanced braces and brackets as invalid\n do {\n block = stack.pop();\n\n if (block.type !== 'root') {\n block.nodes.forEach(node => {\n if (!node.nodes) {\n if (node.type === 'open') node.isOpen = true;\n if (node.type === 'close') node.isClose = true;\n if (!node.nodes) node.type = 'text';\n node.invalid = true;\n }\n });\n\n // get the location of the block on parent.nodes (block's siblings)\n let parent = stack[stack.length - 1];\n let index = parent.nodes.indexOf(block);\n // replace the (invalid) block with it's nodes\n parent.nodes.splice(index, 1, ...block.nodes);\n }\n } while (stack.length > 0);\n\n push({ type: 'eos' });\n return ast;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst utils = require('./utils');\n\nmodule.exports = (ast, options = {}) => {\n let stringify = (node, parent = {}) => {\n let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let output = '';\n\n if (node.value) {\n if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {\n return '\\\\' + node.value;\n }\n return node.value;\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += stringify(child);\n }\n }\n return output;\n };\n\n return stringify(ast);\n};\n\n","'use strict';\n\nexports.isInteger = num => {\n if (typeof num === 'number') {\n return Number.isInteger(num);\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isInteger(Number(num));\n }\n return false;\n};\n\n/**\n * Find a node of the given type\n */\n\nexports.find = (node, type) => node.nodes.find(node => node.type === type);\n\n/**\n * Find a node of the given type\n */\n\nexports.exceedsLimit = (min, max, step = 1, limit) => {\n if (limit === false) return false;\n if (!exports.isInteger(min) || !exports.isInteger(max)) return false;\n return ((Number(max) - Number(min)) / Number(step)) >= limit;\n};\n\n/**\n * Escape the given node with '\\\\' before node.value\n */\n\nexports.escapeNode = (block, n = 0, type) => {\n let node = block.nodes[n];\n if (!node) return;\n\n if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {\n if (node.escaped !== true) {\n node.value = '\\\\' + node.value;\n node.escaped = true;\n }\n }\n};\n\n/**\n * Returns true if the given brace node should be enclosed in literal braces\n */\n\nexports.encloseBrace = 
node => {\n if (node.type !== 'brace') return false;\n if ((node.commas >> 0 + node.ranges >> 0) === 0) {\n node.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a brace node is invalid.\n */\n\nexports.isInvalidBrace = block => {\n if (block.type !== 'brace') return false;\n if (block.invalid === true || block.dollar) return true;\n if ((block.commas >> 0 + block.ranges >> 0) === 0) {\n block.invalid = true;\n return true;\n }\n if (block.open !== true || block.close !== true) {\n block.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a node is an open or close node\n */\n\nexports.isOpenOrClose = node => {\n if (node.type === 'open' || node.type === 'close') {\n return true;\n }\n return node.open === true || node.close === true;\n};\n\n/**\n * Reduce an array of text nodes.\n */\n\nexports.reduce = nodes => nodes.reduce((acc, node) => {\n if (node.type === 'text') acc.push(node.value);\n if (node.type === 'range') node.type = 'text';\n return acc;\n}, []);\n\n/**\n * Flatten an array\n */\n\nexports.flatten = (...args) => {\n const result = [];\n const flat = arr => {\n for (let i = 0; i < arr.length; i++) {\n let ele = arr[i];\n Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);\n }\n return result;\n };\n flat(args);\n return result;\n};\n","/*!\n * fill-range \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\n'use strict';\n\nconst util = require('util');\nconst toRegexRange = require('to-regex-range');\n\nconst isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\n\nconst transform = toNumber => {\n return value => toNumber === true ? Number(value) : String(value);\n};\n\nconst isValidValue = value => {\n return typeof value === 'number' || (typeof value === 'string' && value !== '');\n};\n\nconst isNumber = num => Number.isInteger(+num);\n\nconst zeros = input => {\n let value = `${input}`;\n let index = -1;\n if (value[0] === '-') value = value.slice(1);\n if (value === '0') return false;\n while (value[++index] === '0');\n return index > 0;\n};\n\nconst stringify = (start, end, options) => {\n if (typeof start === 'string' || typeof end === 'string') {\n return true;\n }\n return options.stringify === true;\n};\n\nconst pad = (input, maxLength, toNumber) => {\n if (maxLength > 0) {\n let dash = input[0] === '-' ? '-' : '';\n if (dash) input = input.slice(1);\n input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));\n }\n if (toNumber === false) {\n return String(input);\n }\n return input;\n};\n\nconst toMaxLen = (input, maxLength) => {\n let negative = input[0] === '-' ? '-' : '';\n if (negative) {\n input = input.slice(1);\n maxLength--;\n }\n while (input.length < maxLength) input = '0' + input;\n return negative ? ('-' + input) : input;\n};\n\nconst toSequence = (parts, options) => {\n parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n\n let prefix = options.capture ? 
'' : '?:';\n let positives = '';\n let negatives = '';\n let result;\n\n if (parts.positives.length) {\n positives = parts.positives.join('|');\n }\n\n if (parts.negatives.length) {\n negatives = `-(${prefix}${parts.negatives.join('|')})`;\n }\n\n if (positives && negatives) {\n result = `${positives}|${negatives}`;\n } else {\n result = positives || negatives;\n }\n\n if (options.wrap) {\n return `(${prefix}${result})`;\n }\n\n return result;\n};\n\nconst toRange = (a, b, isNumbers, options) => {\n if (isNumbers) {\n return toRegexRange(a, b, { wrap: false, ...options });\n }\n\n let start = String.fromCharCode(a);\n if (a === b) return start;\n\n let stop = String.fromCharCode(b);\n return `[${start}-${stop}]`;\n};\n\nconst toRegex = (start, end, options) => {\n if (Array.isArray(start)) {\n let wrap = options.wrap === true;\n let prefix = options.capture ? '' : '?:';\n return wrap ? `(${prefix}${start.join('|')})` : start.join('|');\n }\n return toRegexRange(start, end, options);\n};\n\nconst rangeError = (...args) => {\n return new RangeError('Invalid range arguments: ' + util.inspect(...args));\n};\n\nconst invalidRange = (start, end, options) => {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n};\n\nconst invalidStep = (step, options) => {\n if (options.strictRanges === true) {\n throw new TypeError(`Expected step \"${step}\" to be a number`);\n }\n return [];\n};\n\nconst fillNumbers = (start, end, step = 1, options = {}) => {\n let a = Number(start);\n let b = Number(end);\n\n if (!Number.isInteger(a) || !Number.isInteger(b)) {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n }\n\n // fix negative zero\n if (a === 0) a = 0;\n if (b === 0) b = 0;\n\n let descending = a > b;\n let startString = String(start);\n let endString = String(end);\n let stepString = String(step);\n step = Math.max(Math.abs(step), 1);\n\n let padded = zeros(startString) || zeros(endString) || zeros(stepString);\n let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;\n let toNumber = padded === false && stringify(start, end, options) === false;\n let format = options.transform || transform(toNumber);\n\n if (options.toRegex && step === 1) {\n return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);\n }\n\n let parts = { negatives: [], positives: [] };\n let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));\n let range = [];\n let index = 0;\n\n while (descending ? a >= b : a <= b) {\n if (options.toRegex === true && step > 1) {\n push(a);\n } else {\n range.push(pad(format(a, index), maxLen, toNumber));\n }\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return step > 1\n ? toSequence(parts, options)\n : toRegex(range, null, { wrap: false, ...options });\n }\n\n return range;\n};\n\nconst fillLetters = (start, end, step = 1, options = {}) => {\n if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {\n return invalidRange(start, end, options);\n }\n\n\n let format = options.transform || (val => String.fromCharCode(val));\n let a = `${start}`.charCodeAt(0);\n let b = `${end}`.charCodeAt(0);\n\n let descending = a > b;\n let min = Math.min(a, b);\n let max = Math.max(a, b);\n\n if (options.toRegex && step === 1) {\n return toRange(min, max, false, options);\n }\n\n let range = [];\n let index = 0;\n\n while (descending ? 
a >= b : a <= b) {\n range.push(format(a, index));\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return toRegex(range, null, { wrap: false, options });\n }\n\n return range;\n};\n\nconst fill = (start, end, step, options = {}) => {\n if (end == null && isValidValue(start)) {\n return [start];\n }\n\n if (!isValidValue(start) || !isValidValue(end)) {\n return invalidRange(start, end, options);\n }\n\n if (typeof step === 'function') {\n return fill(start, end, 1, { transform: step });\n }\n\n if (isObject(step)) {\n return fill(start, end, 0, step);\n }\n\n let opts = { ...options };\n if (opts.capture === true) opts.wrap = true;\n step = step || opts.step || 1;\n\n if (!isNumber(step)) {\n if (step != null && !isObject(step)) return invalidStep(step, opts);\n return fill(start, end, 1, step);\n }\n\n if (isNumber(start) && isNumber(end)) {\n return fillNumbers(start, end, step, opts);\n }\n\n return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);\n};\n\nmodule.exports = fill;\n","/*!\n * is-number \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nmodule.exports = function(num) {\n if (typeof num === 'number') {\n return num - num === 0;\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);\n }\n return false;\n};\n","'use strict';\n\nconst util = require('util');\nconst braces = require('braces');\nconst picomatch = require('picomatch');\nconst utils = require('picomatch/lib/utils');\nconst isEmptyString = val => typeof val === 'string' && (val === '' || val === './');\n\n/**\n * Returns an array of strings that match one or more glob patterns.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm(list, patterns[, options]);\n *\n * console.log(mm(['a.js', 'a.txt'], ['*.js']));\n * //=> [ 'a.js' ]\n * ```\n * @param {String|Array} list List of strings to match.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} options See available [options](#options)\n * @return {Array} Returns an array of matches\n * @summary false\n * @api public\n */\n\nconst micromatch = (list, patterns, options) => {\n patterns = [].concat(patterns);\n list = [].concat(list);\n\n let omit = new Set();\n let keep = new Set();\n let items = new Set();\n let negatives = 0;\n\n let onResult = state => {\n items.add(state.output);\n if (options && options.onResult) {\n options.onResult(state);\n }\n };\n\n for (let i = 0; i < patterns.length; i++) {\n let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);\n let negated = isMatch.state.negated || isMatch.state.negatedExtglob;\n if (negated) negatives++;\n\n for (let item of list) {\n let matched = isMatch(item, true);\n\n let match = negated ? !matched.isMatch : matched.isMatch;\n if (!match) continue;\n\n if (negated) {\n omit.add(matched.output);\n } else {\n omit.delete(matched.output);\n keep.add(matched.output);\n }\n }\n }\n\n let result = negatives === patterns.length ? [...items] : [...keep];\n let matches = result.filter(item => !omit.has(item));\n\n if (options && matches.length === 0) {\n if (options.failglob === true) {\n throw new Error(`No matches found for \"${patterns.join(', ')}\"`);\n }\n\n if (options.nonull === true || options.nullglob === true) {\n return options.unescape ? 
patterns.map(p => p.replace(/\\\\/g, '')) : patterns;\n }\n }\n\n return matches;\n};\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.match = micromatch;\n\n/**\n * Returns a matcher function from the given glob `pattern` and `options`.\n * The returned function takes a string to match as its only argument and returns\n * true if the string is a match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matcher(pattern[, options]);\n *\n * const isMatch = mm.matcher('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @param {String} `pattern` Glob pattern\n * @param {Object} `options`\n * @return {Function} Returns a matcher function.\n * @api public\n */\n\nmicromatch.matcher = (pattern, options) => picomatch(pattern, options);\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.isMatch(string, patterns[, options]);\n *\n * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(mm.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.any = micromatch.isMatch;\n\n/**\n * Returns a list of strings that _**do not match any**_ of the given `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.not(list, patterns[, options]);\n *\n * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));\n * //=> ['b.b', 'c.c']\n * ```\n * @param {Array} `list` Array of strings to match.\n * @param {String|Array} `patterns` One or more glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Array} Returns an array of strings that **do not match** the given patterns.\n * @api public\n */\n\nmicromatch.not = (list, patterns, options = {}) => {\n patterns = [].concat(patterns).map(String);\n let result = new Set();\n let items = [];\n\n let onResult = state => {\n if (options.onResult) options.onResult(state);\n items.push(state.output);\n };\n\n let matches = micromatch(list, patterns, { ...options, onResult });\n\n for (let item of items) {\n if (!matches.includes(item)) {\n result.add(item);\n }\n }\n return [...result];\n};\n\n/**\n * Returns true if the given `string` contains the given pattern. 
Similar\n * to [.isMatch](#isMatch) but the pattern can match any part of the string.\n *\n * ```js\n * var mm = require('micromatch');\n * // mm.contains(string, pattern[, options]);\n *\n * console.log(mm.contains('aa/bb/cc', '*b'));\n * //=> true\n * console.log(mm.contains('aa/bb/cc', '*d'));\n * //=> false\n * ```\n * @param {String} `str` The string to match.\n * @param {String|Array} `patterns` Glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if the patter matches any part of `str`.\n * @api public\n */\n\nmicromatch.contains = (str, pattern, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n if (Array.isArray(pattern)) {\n return pattern.some(p => micromatch.contains(str, p, options));\n }\n\n if (typeof pattern === 'string') {\n if (isEmptyString(str) || isEmptyString(pattern)) {\n return false;\n }\n\n if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {\n return true;\n }\n }\n\n return micromatch.isMatch(str, pattern, { ...options, contains: true });\n};\n\n/**\n * Filter the keys of the given object with the given `glob` pattern\n * and `options`. Does not attempt to match nested keys. If you need this feature,\n * use [glob-object][] instead.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matchKeys(object, patterns[, options]);\n *\n * const obj = { aa: 'a', ab: 'b', ac: 'c' };\n * console.log(mm.matchKeys(obj, '*b'));\n * //=> { ab: 'b' }\n * ```\n * @param {Object} `object` The object with keys to filter.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Object} Returns an object with only keys that match the given patterns.\n * @api public\n */\n\nmicromatch.matchKeys = (obj, patterns, options) => {\n if (!utils.isObject(obj)) {\n throw new TypeError('Expected the first argument to be an object');\n }\n let keys = micromatch(Object.keys(obj), patterns, options);\n let res = {};\n for (let key of keys) res[key] = obj[key];\n return res;\n};\n\n/**\n * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.some(list, patterns[, options]);\n *\n * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // true\n * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test. 
Returns as soon as the first match is found.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.some = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (items.some(item => isMatch(item))) {\n return true;\n }\n }\n return false;\n};\n\n/**\n * Returns true if every string in the given `list` matches\n * any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.every(list, patterns[, options]);\n *\n * console.log(mm.every('foo.js', ['foo.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // false\n * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.every = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (!items.every(item => isMatch(item))) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Returns true if **all** of the given `patterns` match\n * the specified string.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.all(string, patterns[, options]);\n *\n * console.log(mm.all('foo.js', ['foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', '!foo.js']));\n * // false\n *\n * console.log(mm.all('foo.js', ['*.js', 'foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));\n * // true\n * ```\n * @param {String|Array} `str` The string to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.all = (str, patterns, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n return [].concat(patterns).every(p => picomatch(p, options)(str));\n};\n\n/**\n * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.capture(pattern, string[, options]);\n *\n * console.log(mm.capture('test/*.js', 'test/foo.js'));\n * //=> ['foo']\n * console.log(mm.capture('test/*.js', 'foo/bar.css'));\n * //=> null\n * ```\n * @param {String} `glob` Glob pattern to use for matching.\n * @param {String} `input` String to match\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`.\n * @api public\n */\n\nmicromatch.capture = (glob, input, options) => {\n let posix = utils.isWindows(options);\n let regex = 
picomatch.makeRe(String(glob), { ...options, capture: true });\n let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);\n\n if (match) {\n return match.slice(1).map(v => v === void 0 ? '' : v);\n }\n};\n\n/**\n * Create a regular expression from the given glob `pattern`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.makeRe(pattern[, options]);\n *\n * console.log(mm.makeRe('*.js'));\n * //=> /^(?:(\\.[\\\\\\/])?(?!\\.)(?=.)[^\\/]*?\\.js)$/\n * ```\n * @param {String} `pattern` A glob pattern to convert to regex.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\nmicromatch.makeRe = (...args) => picomatch.makeRe(...args);\n\n/**\n * Scan a glob pattern to separate the pattern into segments. Used\n * by the [split](#split) method.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm.scan(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\nmicromatch.scan = (...args) => picomatch.scan(...args);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm(pattern[, options]);\n * ```\n * @param {String} `glob`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as regex source string.\n * @api public\n */\n\nmicromatch.parse = (patterns, options) => {\n let res = [];\n for (let pattern of [].concat(patterns || [])) {\n for (let str of braces(String(pattern), options)) {\n res.push(picomatch.parse(str, options));\n }\n }\n return res;\n};\n\n/**\n * Process the given brace `pattern`.\n *\n * ```js\n * const { braces } = require('micromatch');\n * console.log(braces('foo/{a,b,c}/bar'));\n * //=> [ 'foo/(a|b|c)/bar' ]\n *\n * console.log(braces('foo/{a,b,c}/bar', { expand: true }));\n * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]\n * ```\n * @param {String} `pattern` String with brace pattern to process.\n * @param {Object} `options` Any [options](#options) to change how expansion is performed. 
See the [braces][] library for all available options.\n * @return {Array}\n * @api public\n */\n\nmicromatch.braces = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n if ((options && options.nobrace === true) || !/\\{.*\\}/.test(pattern)) {\n return [pattern];\n }\n return braces(pattern, options);\n};\n\n/**\n * Expand braces\n */\n\nmicromatch.braceExpand = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n return micromatch.braces(pattern, { ...options, expand: true });\n};\n\n/**\n * Expose micromatch\n */\n\nmodule.exports = micromatch;\n","/*!\n * to-regex-range \n *\n * Copyright (c) 2015-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nconst isNumber = require('is-number');\n\nconst toRegexRange = (min, max, options) => {\n if (isNumber(min) === false) {\n throw new TypeError('toRegexRange: expected the first argument to be a number');\n }\n\n if (max === void 0 || min === max) {\n return String(min);\n }\n\n if (isNumber(max) === false) {\n throw new TypeError('toRegexRange: expected the second argument to be a number.');\n }\n\n let opts = { relaxZeros: true, ...options };\n if (typeof opts.strictZeros === 'boolean') {\n opts.relaxZeros = opts.strictZeros === false;\n }\n\n let relax = String(opts.relaxZeros);\n let shorthand = String(opts.shorthand);\n let capture = String(opts.capture);\n let wrap = String(opts.wrap);\n let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;\n\n if (toRegexRange.cache.hasOwnProperty(cacheKey)) {\n return toRegexRange.cache[cacheKey].result;\n }\n\n let a = Math.min(min, max);\n let b = Math.max(min, max);\n\n if (Math.abs(a - b) === 1) {\n let result = min + '|' + max;\n if (opts.capture) {\n return `(${result})`;\n }\n if (opts.wrap === false) {\n return result;\n }\n return `(?:${result})`;\n }\n\n let isPadded = hasPadding(min) || hasPadding(max);\n let state = { min, max, a, b };\n let positives = [];\n let negatives = [];\n\n if (isPadded) {\n state.isPadded = isPadded;\n state.maxLen = String(state.max).length;\n }\n\n if (a < 0) {\n let newMin = b < 0 ? 
Math.abs(b) : 1;\n negatives = splitToPatterns(newMin, Math.abs(a), state, opts);\n a = state.a = 0;\n }\n\n if (b >= 0) {\n positives = splitToPatterns(a, b, state, opts);\n }\n\n state.negatives = negatives;\n state.positives = positives;\n state.result = collatePatterns(negatives, positives, opts);\n\n if (opts.capture === true) {\n state.result = `(${state.result})`;\n } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {\n state.result = `(?:${state.result})`;\n }\n\n toRegexRange.cache[cacheKey] = state;\n return state.result;\n};\n\nfunction collatePatterns(neg, pos, options) {\n let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];\n let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];\n let intersected = filterPatterns(neg, pos, '-?', true, options) || [];\n let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);\n return subpatterns.join('|');\n}\n\nfunction splitToRanges(min, max) {\n let nines = 1;\n let zeros = 1;\n\n let stop = countNines(min, nines);\n let stops = new Set([max]);\n\n while (min <= stop && stop <= max) {\n stops.add(stop);\n nines += 1;\n stop = countNines(min, nines);\n }\n\n stop = countZeros(max + 1, zeros) - 1;\n\n while (min < stop && stop <= max) {\n stops.add(stop);\n zeros += 1;\n stop = countZeros(max + 1, zeros) - 1;\n }\n\n stops = [...stops];\n stops.sort(compare);\n return stops;\n}\n\n/**\n * Convert a range to a regex pattern\n * @param {Number} `start`\n * @param {Number} `stop`\n * @return {String}\n */\n\nfunction rangeToPattern(start, stop, options) {\n if (start === stop) {\n return { pattern: start, count: [], digits: 0 };\n }\n\n let zipped = zip(start, stop);\n let digits = zipped.length;\n let pattern = '';\n let count = 0;\n\n for (let i = 0; i < digits; i++) {\n let [startDigit, stopDigit] = zipped[i];\n\n if (startDigit === stopDigit) {\n pattern += startDigit;\n\n } else if (startDigit !== '0' || stopDigit !== '9') {\n pattern += toCharacterClass(startDigit, stopDigit, options);\n\n } else {\n count++;\n }\n }\n\n if (count) {\n pattern += options.shorthand === true ? 
'\\\\d' : '[0-9]';\n }\n\n return { pattern, count: [count], digits };\n}\n\nfunction splitToPatterns(min, max, tok, options) {\n let ranges = splitToRanges(min, max);\n let tokens = [];\n let start = min;\n let prev;\n\n for (let i = 0; i < ranges.length; i++) {\n let max = ranges[i];\n let obj = rangeToPattern(String(start), String(max), options);\n let zeros = '';\n\n if (!tok.isPadded && prev && prev.pattern === obj.pattern) {\n if (prev.count.length > 1) {\n prev.count.pop();\n }\n\n prev.count.push(obj.count[0]);\n prev.string = prev.pattern + toQuantifier(prev.count);\n start = max + 1;\n continue;\n }\n\n if (tok.isPadded) {\n zeros = padZeros(max, tok, options);\n }\n\n obj.string = zeros + obj.pattern + toQuantifier(obj.count);\n tokens.push(obj);\n start = max + 1;\n prev = obj;\n }\n\n return tokens;\n}\n\nfunction filterPatterns(arr, comparison, prefix, intersection, options) {\n let result = [];\n\n for (let ele of arr) {\n let { string } = ele;\n\n // only push if _both_ are negative...\n if (!intersection && !contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n\n // or _both_ are positive\n if (intersection && contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n }\n return result;\n}\n\n/**\n * Zip strings\n */\n\nfunction zip(a, b) {\n let arr = [];\n for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);\n return arr;\n}\n\nfunction compare(a, b) {\n return a > b ? 1 : b > a ? -1 : 0;\n}\n\nfunction contains(arr, key, val) {\n return arr.some(ele => ele[key] === val);\n}\n\nfunction countNines(min, len) {\n return Number(String(min).slice(0, -len) + '9'.repeat(len));\n}\n\nfunction countZeros(integer, zeros) {\n return integer - (integer % Math.pow(10, zeros));\n}\n\nfunction toQuantifier(digits) {\n let [start = 0, stop = ''] = digits;\n if (stop || start > 1) {\n return `{${start + (stop ? ',' + stop : '')}}`;\n }\n return '';\n}\n\nfunction toCharacterClass(a, b, options) {\n return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;\n}\n\nfunction hasPadding(str) {\n return /^-?(0+)\\d/.test(str);\n}\n\nfunction padZeros(value, tok, options) {\n if (!tok.isPadded) {\n return value;\n }\n\n let diff = Math.abs(tok.maxLen - String(value).length);\n let relax = options.relaxZeros !== false;\n\n switch (diff) {\n case 0:\n return '';\n case 1:\n return relax ? '0?' : '0';\n case 2:\n return relax ? '0{0,2}' : '00';\n default: {\n return relax ? 
`0{0,${diff}}` : `0{${diff}}`;\n }\n }\n}\n\n/**\n * Cache\n */\n\ntoRegexRange.cache = {};\ntoRegexRange.clearCache = () => (toRegexRange.cache = {});\n\n/**\n * Expose `toRegexRange`\n */\n\nmodule.exports = toRegexRange;\n","\"use strict\";\r\nconst taskManager = require(\"./managers/tasks\");\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nconst utils = require(\"./utils\");\r\nasync function FastGlob(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, async_1.default, options);\r\n const result = await Promise.all(works);\r\n return utils.array.flatten(result);\r\n}\r\n// https://github.com/typescript-eslint/typescript-eslint/issues/60\r\n// eslint-disable-next-line no-redeclare\r\n(function (FastGlob) {\r\n function sync(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, sync_1.default, options);\r\n return utils.array.flatten(works);\r\n }\r\n FastGlob.sync = sync;\r\n function stream(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, stream_1.default, options);\r\n /**\r\n * The stream returned by the provider cannot work with an asynchronous iterator.\r\n * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.\r\n * This affects performance (+25%). I don't see best solution right now.\r\n */\r\n return utils.stream.merge(works);\r\n }\r\n FastGlob.stream = stream;\r\n function generateTasks(source, options) {\r\n assertPatternsInput(source);\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n return taskManager.generate(patterns, settings);\r\n }\r\n FastGlob.generateTasks = generateTasks;\r\n function isDynamicPattern(source, options) {\r\n assertPatternsInput(source);\r\n const settings = new settings_1.default(options);\r\n return utils.pattern.isDynamicPattern(source, settings);\r\n }\r\n FastGlob.isDynamicPattern = isDynamicPattern;\r\n function escapePath(source) {\r\n assertPatternsInput(source);\r\n return utils.path.escape(source);\r\n }\r\n FastGlob.escapePath = escapePath;\r\n})(FastGlob || (FastGlob = {}));\r\nfunction getWorks(source, _Provider, options) {\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n const tasks = taskManager.generate(patterns, settings);\r\n const provider = new _Provider(settings);\r\n return tasks.map(provider.read, provider);\r\n}\r\nfunction assertPatternsInput(input) {\r\n const source = [].concat(input);\r\n const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));\r\n if (!isValidSource) {\r\n throw new TypeError('Patterns must be a string (non empty) or an array of strings');\r\n }\r\n}\r\nmodule.exports = FastGlob;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;\r\nconst utils = require(\"../utils\");\r\nfunction generate(patterns, settings) {\r\n const positivePatterns = getPositivePatterns(patterns);\r\n const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);\r\n const staticPatterns = 
positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));\r\n const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));\r\n const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);\r\n const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);\r\n return staticTasks.concat(dynamicTasks);\r\n}\r\nexports.generate = generate;\r\nfunction convertPatternsToTasks(positive, negative, dynamic) {\r\n const positivePatternsGroup = groupPatternsByBaseDirectory(positive);\r\n // When we have a global group – there is no reason to divide the patterns into independent tasks.\r\n // In this case, the global task covers the rest.\r\n if ('.' in positivePatternsGroup) {\r\n const task = convertPatternGroupToTask('.', positive, negative, dynamic);\r\n return [task];\r\n }\r\n return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);\r\n}\r\nexports.convertPatternsToTasks = convertPatternsToTasks;\r\nfunction getPositivePatterns(patterns) {\r\n return utils.pattern.getPositivePatterns(patterns);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getNegativePatternsAsPositive(patterns, ignore) {\r\n const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);\r\n const positive = negative.map(utils.pattern.convertToPositivePattern);\r\n return positive;\r\n}\r\nexports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;\r\nfunction groupPatternsByBaseDirectory(patterns) {\r\n const group = {};\r\n return patterns.reduce((collection, pattern) => {\r\n const base = utils.pattern.getBaseDirectory(pattern);\r\n if (base in collection) {\r\n collection[base].push(pattern);\r\n }\r\n else {\r\n collection[base] = [pattern];\r\n }\r\n return collection;\r\n }, group);\r\n}\r\nexports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;\r\nfunction convertPatternGroupsToTasks(positive, negative, dynamic) {\r\n return Object.keys(positive).map((base) => {\r\n return convertPatternGroupToTask(base, positive[base], negative, dynamic);\r\n });\r\n}\r\nexports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;\r\nfunction convertPatternGroupToTask(base, positive, negative, dynamic) {\r\n return {\r\n dynamic,\r\n positive,\r\n negative,\r\n base,\r\n patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))\r\n };\r\n}\r\nexports.convertPatternGroupToTask = convertPatternGroupToTask;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderAsync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = [];\r\n return new Promise((resolve, reject) => {\r\n const stream = this.api(root, task, options);\r\n stream.once('error', reject);\r\n stream.on('data', (entry) => entries.push(options.transform(entry)));\r\n stream.once('end', () => resolve(entries));\r\n });\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderAsync;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nconst partial_1 = require(\"../matchers/partial\");\r\nclass DeepFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n }\r\n getFilter(basePath, positive, negative) {\r\n const matcher = this._getMatcher(positive);\r\n const negativeRe = this._getNegativePatternsRe(negative);\r\n return (entry) => this._filter(basePath, entry, matcher, negativeRe);\r\n }\r\n _getMatcher(patterns) {\r\n return new partial_1.default(patterns, this._settings, this._micromatchOptions);\r\n }\r\n _getNegativePatternsRe(patterns) {\r\n const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);\r\n return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);\r\n }\r\n _filter(basePath, entry, matcher, negativeRe) {\r\n if (this._isSkippedByDeep(basePath, entry.path)) {\r\n return false;\r\n }\r\n if (this._isSkippedSymbolicLink(entry)) {\r\n return false;\r\n }\r\n const filepath = utils.path.removeLeadingDotSegment(entry.path);\r\n if (this._isSkippedByPositivePatterns(filepath, matcher)) {\r\n return false;\r\n }\r\n return this._isSkippedByNegativePatterns(filepath, negativeRe);\r\n }\r\n _isSkippedByDeep(basePath, entryPath) {\r\n /**\r\n * Avoid unnecessary depth calculations when it doesn't matter.\r\n */\r\n if (this._settings.deep === Infinity) {\r\n return false;\r\n }\r\n return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;\r\n }\r\n _getEntryLevel(basePath, entryPath) {\r\n const entryPathDepth = entryPath.split('/').length;\r\n if (basePath === '') {\r\n return entryPathDepth;\r\n }\r\n const basePathDepth = basePath.split('/').length;\r\n return entryPathDepth - basePathDepth;\r\n }\r\n _isSkippedSymbolicLink(entry) {\r\n return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();\r\n }\r\n _isSkippedByPositivePatterns(entryPath, matcher) {\r\n return !this._settings.baseNameMatch && !matcher.match(entryPath);\r\n }\r\n _isSkippedByNegativePatterns(entryPath, patternsRe) {\r\n return !utils.pattern.matchAny(entryPath, patternsRe);\r\n }\r\n}\r\nexports.default = DeepFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this.index = new Map();\r\n }\r\n getFilter(positive, negative) {\r\n const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);\r\n const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);\r\n return (entry) => this._filter(entry, positiveRe, negativeRe);\r\n }\r\n _filter(entry, positiveRe, negativeRe) {\r\n if (this._settings.unique && this._isDuplicateEntry(entry)) {\r\n return false;\r\n }\r\n if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {\r\n return false;\r\n }\r\n if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {\r\n return false;\r\n }\r\n const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path;\r\n const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);\r\n if (this._settings.unique && isMatched) {\r\n this._createIndexRecord(entry);\r\n }\r\n return isMatched;\r\n }\r\n _isDuplicateEntry(entry) {\r\n return this.index.has(entry.path);\r\n }\r\n _createIndexRecord(entry) {\r\n this.index.set(entry.path, undefined);\r\n }\r\n _onlyFileFilter(entry) {\r\n return this._settings.onlyFiles && !entry.dirent.isFile();\r\n }\r\n _onlyDirectoryFilter(entry) {\r\n return this._settings.onlyDirectories && !entry.dirent.isDirectory();\r\n }\r\n _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {\r\n if (!this._settings.absolute) {\r\n return false;\r\n }\r\n const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);\r\n return utils.pattern.matchAny(fullpath, patternsRe);\r\n }\r\n _isMatchToPatterns(entryPath, patternsRe) {\r\n const filepath = utils.path.removeLeadingDotSegment(entryPath);\r\n return utils.pattern.matchAny(filepath, patternsRe);\r\n }\r\n}\r\nexports.default = EntryFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass ErrorFilter {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getFilter() {\r\n return (error) => this._isNonFatalError(error);\r\n }\r\n _isNonFatalError(error) {\r\n return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = ErrorFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass Matcher {\r\n constructor(_patterns, _settings, _micromatchOptions) {\r\n this._patterns = _patterns;\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this._storage = [];\r\n this._fillStorage();\r\n }\r\n _fillStorage() {\r\n /**\r\n * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).\r\n * So, before expand patterns with brace expansion into separated patterns.\r\n */\r\n const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);\r\n for (const pattern of patterns) {\r\n const segments = this._getPatternSegments(pattern);\r\n const sections = this._splitSegmentsIntoSections(segments);\r\n this._storage.push({\r\n complete: sections.length <= 1,\r\n pattern,\r\n segments,\r\n sections\r\n });\r\n }\r\n }\r\n _getPatternSegments(pattern) {\r\n const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);\r\n return parts.map((part) => {\r\n const dynamic = utils.pattern.isDynamicPattern(part, this._settings);\r\n if (!dynamic) {\r\n return {\r\n dynamic: false,\r\n pattern: part\r\n };\r\n }\r\n return {\r\n dynamic: true,\r\n pattern: part,\r\n patternRe: utils.pattern.makeRe(part, this._micromatchOptions)\r\n };\r\n });\r\n }\r\n _splitSegmentsIntoSections(segments) {\r\n return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));\r\n }\r\n}\r\nexports.default = Matcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst matcher_1 = require(\"./matcher\");\r\nclass PartialMatcher extends matcher_1.default {\r\n match(filepath) {\r\n const parts = filepath.split('/');\r\n const levels = parts.length;\r\n const patterns = this._storage.filter((info) => !info.complete || 
info.segments.length > levels);\r\n for (const pattern of patterns) {\r\n const section = pattern.sections[0];\r\n /**\r\n * In this case, the pattern has a globstar and we must read all directories unconditionally,\r\n * but only if the level has reached the end of the first group.\r\n *\r\n * fixtures/{a,b}/**\r\n * ^ true/false ^ always true\r\n */\r\n if (!pattern.complete && levels > section.length) {\r\n return true;\r\n }\r\n const match = parts.every((part, index) => {\r\n const segment = pattern.segments[index];\r\n if (segment.dynamic && segment.patternRe.test(part)) {\r\n return true;\r\n }\r\n if (!segment.dynamic && segment.pattern === part) {\r\n return true;\r\n }\r\n return false;\r\n });\r\n if (match) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n}\r\nexports.default = PartialMatcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst deep_1 = require(\"./filters/deep\");\r\nconst entry_1 = require(\"./filters/entry\");\r\nconst error_1 = require(\"./filters/error\");\r\nconst entry_2 = require(\"./transformers/entry\");\r\nclass Provider {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this.errorFilter = new error_1.default(this._settings);\r\n this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());\r\n this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());\r\n this.entryTransformer = new entry_2.default(this._settings);\r\n }\r\n _getRootDirectory(task) {\r\n return path.resolve(this._settings.cwd, task.base);\r\n }\r\n _getReaderOptions(task) {\r\n const basePath = task.base === '.' ? '' : task.base;\r\n return {\r\n basePath,\r\n pathSegmentSeparator: '/',\r\n concurrency: this._settings.concurrency,\r\n deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),\r\n entryFilter: this.entryFilter.getFilter(task.positive, task.negative),\r\n errorFilter: this.errorFilter.getFilter(),\r\n followSymbolicLinks: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n stats: this._settings.stats,\r\n throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,\r\n transform: this.entryTransformer.getTransformer()\r\n };\r\n }\r\n _getMicromatchOptions() {\r\n return {\r\n dot: this._settings.dot,\r\n matchBase: this._settings.baseNameMatch,\r\n nobrace: !this._settings.braceExpansion,\r\n nocase: !this._settings.caseSensitiveMatch,\r\n noext: !this._settings.extglob,\r\n noglobstar: !this._settings.globstar,\r\n posix: true,\r\n strictSlashes: false\r\n };\r\n }\r\n}\r\nexports.default = Provider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst stream_2 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderStream extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_2.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const source = this.api(root, task, options);\r\n const destination = new stream_1.Readable({ objectMode: true, read: () => { } });\r\n source\r\n .once('error', (error) => destination.emit('error', error))\r\n .on('data', (entry) => destination.emit('data', options.transform(entry)))\r\n .once('end', () => destination.emit('end'));\r\n destination\r\n 
.once('close', () => source.destroy());\r\n return destination;\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderSync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new sync_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = this.api(root, task, options);\r\n return entries.map(options.transform);\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryTransformer {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getTransformer() {\r\n return (entry) => this._transform(entry);\r\n }\r\n _transform(entry) {\r\n let filepath = entry.path;\r\n if (this._settings.absolute) {\r\n filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);\r\n filepath = utils.path.unixify(filepath);\r\n }\r\n if (this._settings.markDirectories && entry.dirent.isDirectory()) {\r\n filepath += '/';\r\n }\r\n if (!this._settings.objectMode) {\r\n return filepath;\r\n }\r\n return Object.assign(Object.assign({}, entry), { path: filepath });\r\n }\r\n}\r\nexports.default = EntryTransformer;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst utils = require(\"../utils\");\r\nclass Reader {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this._fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks\r\n });\r\n }\r\n _getFullEntryPath(filepath) {\r\n return path.resolve(this._settings.cwd, filepath);\r\n }\r\n _makeEntry(stats, pattern) {\r\n const entry = {\r\n name: pattern,\r\n path: pattern,\r\n dirent: utils.fs.createDirentFromStats(pattern, stats)\r\n };\r\n if (this._settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n }\r\n _isFatalError(error) {\r\n return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderStream extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkStream = fsWalk.walkStream;\r\n this._stat = fsStat.stat;\r\n }\r\n dynamic(root, options) {\r\n return this._walkStream(root, options);\r\n }\r\n static(patterns, options) {\r\n const filepaths = patterns.map(this._getFullEntryPath, this);\r\n const stream = new stream_1.PassThrough({ objectMode: true });\r\n 
stream._write = (index, _enc, done) => {\r\n return this._getEntry(filepaths[index], patterns[index], options)\r\n .then((entry) => {\r\n if (entry !== null && options.entryFilter(entry)) {\r\n stream.push(entry);\r\n }\r\n if (index === filepaths.length - 1) {\r\n stream.end();\r\n }\r\n done();\r\n })\r\n .catch(done);\r\n };\r\n for (let i = 0; i < filepaths.length; i++) {\r\n stream.write(i);\r\n }\r\n return stream;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n return this._getStat(filepath)\r\n .then((stats) => this._makeEntry(stats, pattern))\r\n .catch((error) => {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n });\r\n }\r\n _getStat(filepath) {\r\n return new Promise((resolve, reject) => {\r\n this._stat(filepath, this._fsStatSettings, (error, stats) => {\r\n return error === null ? resolve(stats) : reject(error);\r\n });\r\n });\r\n }\r\n}\r\nexports.default = ReaderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderSync extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkSync = fsWalk.walkSync;\r\n this._statSync = fsStat.statSync;\r\n }\r\n dynamic(root, options) {\r\n return this._walkSync(root, options);\r\n }\r\n static(patterns, options) {\r\n const entries = [];\r\n for (const pattern of patterns) {\r\n const filepath = this._getFullEntryPath(pattern);\r\n const entry = this._getEntry(filepath, pattern, options);\r\n if (entry === null || !options.entryFilter(entry)) {\r\n continue;\r\n }\r\n entries.push(entry);\r\n }\r\n return entries;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n try {\r\n const stats = this._getStat(filepath);\r\n return this._makeEntry(stats, pattern);\r\n }\r\n catch (error) {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n }\r\n }\r\n _getStat(filepath) {\r\n return this._statSync(filepath, this._fsStatSettings);\r\n }\r\n}\r\nexports.default = ReaderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nconst os = require(\"os\");\r\n/**\r\n * The `os.cpus` method can return zero. 
We expect the number of cores to be greater than zero.\r\n * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107\r\n */\r\nconst CPU_COUNT = Math.max(os.cpus().length, 1);\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n lstatSync: fs.lstatSync,\r\n stat: fs.stat,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.absolute = this._getValue(this._options.absolute, false);\r\n this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);\r\n this.braceExpansion = this._getValue(this._options.braceExpansion, true);\r\n this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);\r\n this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);\r\n this.cwd = this._getValue(this._options.cwd, process.cwd());\r\n this.deep = this._getValue(this._options.deep, Infinity);\r\n this.dot = this._getValue(this._options.dot, false);\r\n this.extglob = this._getValue(this._options.extglob, true);\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);\r\n this.fs = this._getFileSystemMethods(this._options.fs);\r\n this.globstar = this._getValue(this._options.globstar, true);\r\n this.ignore = this._getValue(this._options.ignore, []);\r\n this.markDirectories = this._getValue(this._options.markDirectories, false);\r\n this.objectMode = this._getValue(this._options.objectMode, false);\r\n this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);\r\n this.onlyFiles = this._getValue(this._options.onlyFiles, true);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.suppressErrors = this._getValue(this._options.suppressErrors, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);\r\n this.unique = this._getValue(this._options.unique, true);\r\n if (this.onlyDirectories) {\r\n this.onlyFiles = false;\r\n }\r\n if (this.stats) {\r\n this.objectMode = true;\r\n }\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? 
value : option;\r\n }\r\n _getFileSystemMethods(methods = {}) {\r\n return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.splitWhen = exports.flatten = void 0;\r\nfunction flatten(items) {\r\n return items.reduce((collection, item) => [].concat(collection, item), []);\r\n}\r\nexports.flatten = flatten;\r\nfunction splitWhen(items, predicate) {\r\n const result = [[]];\r\n let groupIndex = 0;\r\n for (const item of items) {\r\n if (predicate(item)) {\r\n groupIndex++;\r\n result[groupIndex] = [];\r\n }\r\n else {\r\n result[groupIndex].push(item);\r\n }\r\n }\r\n return result;\r\n}\r\nexports.splitWhen = splitWhen;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEnoentCodeError = void 0;\r\nfunction isEnoentCodeError(error) {\r\n return error.code === 'ENOENT';\r\n}\r\nexports.isEnoentCodeError = isEnoentCodeError;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;\r\nconst array = require(\"./array\");\r\nexports.array = array;\r\nconst errno = require(\"./errno\");\r\nexports.errno = errno;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\nconst path = require(\"./path\");\r\nexports.path = path;\r\nconst pattern = require(\"./pattern\");\r\nexports.pattern = pattern;\r\nconst stream = require(\"./stream\");\r\nexports.stream = stream;\r\nconst string = require(\"./string\");\r\nexports.string = string;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;\r\nconst path = require(\"path\");\r\nconst LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\\\\r\nconst UNESCAPED_GLOB_SYMBOLS_RE = /(\\\\?)([()*?[\\]{|}]|^!|[!+@](?=\\())/g;\r\n/**\r\n * Designed to work only with simple paths: `dir\\\\file`.\r\n */\r\nfunction unixify(filepath) {\r\n return filepath.replace(/\\\\/g, '/');\r\n}\r\nexports.unixify = unixify;\r\nfunction makeAbsolute(cwd, filepath) {\r\n return path.resolve(cwd, filepath);\r\n}\r\nexports.makeAbsolute = makeAbsolute;\r\nfunction escape(pattern) {\r\n return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\\\$2');\r\n}\r\nexports.escape = escape;\r\nfunction removeLeadingDotSegment(entry) {\r\n // We do not use `startsWith` because this is 10x slower than current implementation for some cases.\r\n // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with\r\n if (entry.charAt(0) 
=== '.') {\r\n const secondCharactery = entry.charAt(1);\r\n if (secondCharactery === '/' || secondCharactery === '\\\\') {\r\n return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);\r\n }\r\n }\r\n return entry;\r\n}\r\nexports.removeLeadingDotSegment = removeLeadingDotSegment;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;\r\nconst path = require(\"path\");\r\nconst globParent = require(\"glob-parent\");\r\nconst micromatch = require(\"micromatch\");\r\nconst picomatch = require(\"picomatch\");\r\nconst GLOBSTAR = '**';\r\nconst ESCAPE_SYMBOL = '\\\\';\r\nconst COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;\r\nconst REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\\[.*]/;\r\nconst REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\\(.*\\|.*\\)/;\r\nconst GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\\(.*\\)/;\r\nconst BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\\.\\.).*}/;\r\nfunction isStaticPattern(pattern, options = {}) {\r\n return !isDynamicPattern(pattern, options);\r\n}\r\nexports.isStaticPattern = isStaticPattern;\r\nfunction isDynamicPattern(pattern, options = {}) {\r\n /**\r\n * A special case with an empty string is necessary for matching patterns that start with a forward slash.\r\n * An empty string cannot be a dynamic pattern.\r\n * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.\r\n */\r\n if (pattern === '') {\r\n return false;\r\n }\r\n /**\r\n * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check\r\n * filepath directly (without read directory).\r\n */\r\n if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {\r\n return true;\r\n }\r\n if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n return false;\r\n}\r\nexports.isDynamicPattern = isDynamicPattern;\r\nfunction convertToPositivePattern(pattern) {\r\n return isNegativePattern(pattern) ? pattern.slice(1) : pattern;\r\n}\r\nexports.convertToPositivePattern = convertToPositivePattern;\r\nfunction convertToNegativePattern(pattern) {\r\n return '!' 
+ pattern;\r\n}\r\nexports.convertToNegativePattern = convertToNegativePattern;\r\nfunction isNegativePattern(pattern) {\r\n return pattern.startsWith('!') && pattern[1] !== '(';\r\n}\r\nexports.isNegativePattern = isNegativePattern;\r\nfunction isPositivePattern(pattern) {\r\n return !isNegativePattern(pattern);\r\n}\r\nexports.isPositivePattern = isPositivePattern;\r\nfunction getNegativePatterns(patterns) {\r\n return patterns.filter(isNegativePattern);\r\n}\r\nexports.getNegativePatterns = getNegativePatterns;\r\nfunction getPositivePatterns(patterns) {\r\n return patterns.filter(isPositivePattern);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getBaseDirectory(pattern) {\r\n return globParent(pattern, { flipBackslashes: false });\r\n}\r\nexports.getBaseDirectory = getBaseDirectory;\r\nfunction hasGlobStar(pattern) {\r\n return pattern.includes(GLOBSTAR);\r\n}\r\nexports.hasGlobStar = hasGlobStar;\r\nfunction endsWithSlashGlobStar(pattern) {\r\n return pattern.endsWith('/' + GLOBSTAR);\r\n}\r\nexports.endsWithSlashGlobStar = endsWithSlashGlobStar;\r\nfunction isAffectDepthOfReadingPattern(pattern) {\r\n const basename = path.basename(pattern);\r\n return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);\r\n}\r\nexports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;\r\nfunction expandPatternsWithBraceExpansion(patterns) {\r\n return patterns.reduce((collection, pattern) => {\r\n return collection.concat(expandBraceExpansion(pattern));\r\n }, []);\r\n}\r\nexports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;\r\nfunction expandBraceExpansion(pattern) {\r\n return micromatch.braces(pattern, {\r\n expand: true,\r\n nodupes: true\r\n });\r\n}\r\nexports.expandBraceExpansion = expandBraceExpansion;\r\nfunction getPatternParts(pattern, options) {\r\n let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));\r\n /**\r\n * The scan method returns an empty array in some cases.\r\n * See micromatch/picomatch#58 for more details.\r\n */\r\n if (parts.length === 0) {\r\n parts = [pattern];\r\n }\r\n /**\r\n * The scan method does not return an empty part for the pattern with a forward slash.\r\n * This is another part of micromatch/picomatch#58.\r\n */\r\n if (parts[0].startsWith('/')) {\r\n parts[0] = parts[0].slice(1);\r\n parts.unshift('');\r\n }\r\n return parts;\r\n}\r\nexports.getPatternParts = getPatternParts;\r\nfunction makeRe(pattern, options) {\r\n return micromatch.makeRe(pattern, options);\r\n}\r\nexports.makeRe = makeRe;\r\nfunction convertPatternsToRe(patterns, options) {\r\n return patterns.map((pattern) => makeRe(pattern, options));\r\n}\r\nexports.convertPatternsToRe = convertPatternsToRe;\r\nfunction matchAny(entry, patternsRe) {\r\n return patternsRe.some((patternRe) => patternRe.test(entry));\r\n}\r\nexports.matchAny = matchAny;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.merge = void 0;\r\nconst merge2 = require(\"merge2\");\r\nfunction merge(streams) {\r\n const mergedStream = merge2(streams);\r\n streams.forEach((stream) => {\r\n stream.once('error', (error) => mergedStream.emit('error', error));\r\n });\r\n mergedStream.once('close', () => propagateCloseEventToSources(streams));\r\n mergedStream.once('end', () => propagateCloseEventToSources(streams));\r\n return mergedStream;\r\n}\r\nexports.merge = merge;\r\nfunction propagateCloseEventToSources(streams) {\r\n streams.forEach((stream) => 
stream.emit('close'));\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEmpty = exports.isString = void 0;\r\nfunction isString(input) {\r\n return typeof input === 'string';\r\n}\r\nexports.isString = isString;\r\nfunction isEmpty(input) {\r\n return input === '';\r\n}\r\nexports.isEmpty = isEmpty;\r\n","'use strict'\n\nvar reusify = require('reusify')\n\nfunction fastqueue (context, worker, concurrency) {\n if (typeof context === 'function') {\n concurrency = worker\n worker = context\n context = null\n }\n\n if (concurrency < 1) {\n throw new Error('fastqueue concurrency must be greater than 1')\n }\n\n var cache = reusify(Task)\n var queueHead = null\n var queueTail = null\n var _running = 0\n var errorHandler = null\n\n var self = {\n push: push,\n drain: noop,\n saturated: noop,\n pause: pause,\n paused: false,\n concurrency: concurrency,\n running: running,\n resume: resume,\n idle: idle,\n length: length,\n getQueue: getQueue,\n unshift: unshift,\n empty: noop,\n kill: kill,\n killAndDrain: killAndDrain,\n error: error\n }\n\n return self\n\n function running () {\n return _running\n }\n\n function pause () {\n self.paused = true\n }\n\n function length () {\n var current = queueHead\n var counter = 0\n\n while (current) {\n current = current.next\n counter++\n }\n\n return counter\n }\n\n function getQueue () {\n var current = queueHead\n var tasks = []\n\n while (current) {\n tasks.push(current.value)\n current = current.next\n }\n\n return tasks\n }\n\n function resume () {\n if (!self.paused) return\n self.paused = false\n for (var i = 0; i < self.concurrency; i++) {\n _running++\n release()\n }\n }\n\n function idle () {\n return _running === 0 && self.length() === 0\n }\n\n function push (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n current.errorHandler = errorHandler\n\n if (_running === self.concurrency || self.paused) {\n if (queueTail) {\n queueTail.next = current\n queueTail = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function unshift (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n\n if (_running === self.concurrency || self.paused) {\n if (queueHead) {\n current.next = queueHead\n queueHead = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function release (holder) {\n if (holder) {\n cache.release(holder)\n }\n var next = queueHead\n if (next) {\n if (!self.paused) {\n if (queueTail === queueHead) {\n queueTail = null\n }\n queueHead = next.next\n next.next = null\n worker.call(context, next.value, next.worked)\n if (queueTail === null) {\n self.empty()\n }\n } else {\n _running--\n }\n } else if (--_running === 0) {\n self.drain()\n }\n }\n\n function kill () {\n queueHead = null\n queueTail = null\n self.drain = noop\n }\n\n function killAndDrain () {\n queueHead = null\n queueTail = null\n self.drain()\n self.drain = noop\n }\n\n function error (handler) {\n errorHandler = handler\n }\n}\n\nfunction noop () {}\n\nfunction Task () {\n this.value = null\n this.callback = noop\n this.next = null\n this.release = noop\n 
this.context = null\n this.errorHandler = null\n\n var self = this\n\n this.worked = function worked (err, result) {\n var callback = self.callback\n var errorHandler = self.errorHandler\n var val = self.value\n self.value = null\n self.callback = noop\n if (self.errorHandler) {\n errorHandler(err, val)\n }\n callback.call(self.context, err, result)\n self.release(self)\n }\n}\n\nmodule.exports = fastqueue\n","'use strict';\n\nvar isGlob = require('is-glob');\nvar pathPosixDirname = require('path').posix.dirname;\nvar isWin32 = require('os').platform() === 'win32';\n\nvar slash = '/';\nvar backslash = /\\\\/g;\nvar enclosure = /[\\{\\[].*[\\/]*.*[\\}\\]]$/;\nvar globby = /(^|[^\\\\])([\\{\\[]|\\([^\\)]+$)/;\nvar escaped = /\\\\([\\!\\*\\?\\|\\[\\]\\(\\)\\{\\}])/g;\n\n/**\n * @param {string} str\n * @param {Object} opts\n * @param {boolean} [opts.flipBackslashes=true]\n */\nmodule.exports = function globParent(str, opts) {\n var options = Object.assign({ flipBackslashes: true }, opts);\n\n // flip windows path separators\n if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {\n str = str.replace(backslash, slash);\n }\n\n // special case for strings ending in enclosure containing path separator\n if (enclosure.test(str)) {\n str += slash;\n }\n\n // preserves full path in case of trailing path separator\n str += 'a';\n\n // remove path parts that are globby\n do {\n str = pathPosixDirname(str);\n } while (isGlob(str) || globby.test(str));\n\n // remove escape chars and return result\n return str.replace(escaped, '$1');\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst types_1 = require(\"./types\");\nfunction createRejection(error, ...beforeErrorGroups) {\n const promise = (async () => {\n if (error instanceof types_1.RequestError) {\n try {\n for (const hooks of beforeErrorGroups) {\n if (hooks) {\n for (const hook of hooks) {\n // eslint-disable-next-line no-await-in-loop\n error = await hook(error);\n }\n }\n }\n }\n catch (error_) {\n error = error_;\n }\n }\n throw error;\n })();\n const returnPromise = () => promise;\n promise.json = returnPromise;\n promise.text = returnPromise;\n promise.buffer = returnPromise;\n promise.on = returnPromise;\n return promise;\n}\nexports.default = createRejection;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst events_1 = require(\"events\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst PCancelable = require(\"p-cancelable\");\nconst types_1 = require(\"./types\");\nconst parse_body_1 = require(\"./parse-body\");\nconst core_1 = require(\"../core\");\nconst proxy_events_1 = require(\"../core/utils/proxy-events\");\nconst get_buffer_1 = require(\"../core/utils/get-buffer\");\nconst is_response_ok_1 = require(\"../core/utils/is-response-ok\");\nconst proxiedRequestEvents = [\n 'request',\n 'response',\n 'redirect',\n 'uploadProgress',\n 'downloadProgress'\n];\nfunction asPromise(normalizedOptions) {\n let globalRequest;\n let globalResponse;\n const emitter = new events_1.EventEmitter();\n const promise = new PCancelable((resolve, reject, onCancel) => {\n const makeRequest = (retryCount) => {\n const request = new core_1.default(undefined, normalizedOptions);\n request.retryCount = retryCount;\n request._noPipe = true;\n onCancel(() => request.destroy());\n onCancel.shouldReject = false;\n onCancel(() => reject(new types_1.CancelError(request)));\n globalRequest = request;\n request.once('response', async (response) => {\n var _a;\n response.retryCount = retryCount;\n if (response.request.aborted) {\n // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error\n return;\n }\n // Download body\n let rawBody;\n try {\n rawBody = await get_buffer_1.default(request);\n response.rawBody = rawBody;\n }\n catch (_b) {\n // The same error is caught below.\n // See request.once('error')\n return;\n }\n if (request._isAboutToError) {\n return;\n }\n // Parse body\n const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? _a : '').toLowerCase();\n const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding);\n const { options } = request;\n if (isCompressed && !options.decompress) {\n response.body = rawBody;\n }\n else {\n try {\n response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding);\n }\n catch (error) {\n // Fallback to `utf8`\n response.body = rawBody.toString();\n if (is_response_ok_1.isResponseOk(response)) {\n request._beforeError(error);\n return;\n }\n }\n }\n try {\n for (const [index, hook] of options.hooks.afterResponse.entries()) {\n // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise\n // eslint-disable-next-line no-await-in-loop\n response = await hook(response, async (updatedOptions) => {\n const typedOptions = core_1.default.normalizeArguments(undefined, {\n ...updatedOptions,\n retry: {\n calculateDelay: () => 0\n },\n throwHttpErrors: false,\n resolveBodyOnly: false\n }, options);\n // Remove any further hooks for that request, because we'll call them anyway.\n // The loop continues. 
We don't want duplicates (asPromise recursion).\n typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index);\n for (const hook of typedOptions.hooks.beforeRetry) {\n // eslint-disable-next-line no-await-in-loop\n await hook(typedOptions);\n }\n const promise = asPromise(typedOptions);\n onCancel(() => {\n promise.catch(() => { });\n promise.cancel();\n });\n return promise;\n });\n }\n }\n catch (error) {\n request._beforeError(new types_1.RequestError(error.message, error, request));\n return;\n }\n if (!is_response_ok_1.isResponseOk(response)) {\n request._beforeError(new types_1.HTTPError(response));\n return;\n }\n globalResponse = response;\n resolve(request.options.resolveBodyOnly ? response.body : response);\n });\n const onError = (error) => {\n if (promise.isCanceled) {\n return;\n }\n const { options } = request;\n if (error instanceof types_1.HTTPError && !options.throwHttpErrors) {\n const { response } = error;\n resolve(request.options.resolveBodyOnly ? response.body : response);\n return;\n }\n reject(error);\n };\n request.once('error', onError);\n const previousBody = request.options.body;\n request.once('retry', (newRetryCount, error) => {\n var _a, _b;\n if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? void 0 : _b.options.body)) {\n onError(error);\n return;\n }\n makeRequest(newRetryCount);\n });\n proxy_events_1.default(request, emitter, proxiedRequestEvents);\n };\n makeRequest(0);\n });\n promise.on = (event, fn) => {\n emitter.on(event, fn);\n return promise;\n };\n const shortcut = (responseType) => {\n const newPromise = (async () => {\n // Wait until downloading has ended\n await promise;\n const { options } = globalResponse.request;\n return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding);\n })();\n Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise));\n return newPromise;\n };\n promise.json = () => {\n const { headers } = globalRequest.options;\n if (!globalRequest.writableFinished && headers.accept === undefined) {\n headers.accept = 'application/json';\n }\n return shortcut('json');\n };\n promise.buffer = () => shortcut('buffer');\n promise.text = () => shortcut('text');\n return promise;\n}\nexports.default = asPromise;\n__exportStar(require(\"./types\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nconst normalizeArguments = (options, defaults) => {\n if (is_1.default.null_(options.encoding)) {\n throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead');\n }\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType);\n // `options.responseType`\n if (options.responseType === undefined) {\n options.responseType = 'text';\n }\n // `options.retry`\n const { retry } = options;\n if (defaults) {\n options.retry = { ...defaults.retry };\n }\n else {\n options.retry = {\n calculateDelay: retryObject => retryObject.computedValue,\n limit: 0,\n methods: 
[],\n statusCodes: [],\n errorCodes: [],\n maxRetryAfter: undefined\n };\n }\n if (is_1.default.object(retry)) {\n options.retry = {\n ...options.retry,\n ...retry\n };\n options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))];\n options.retry.statusCodes = [...new Set(options.retry.statusCodes)];\n options.retry.errorCodes = [...new Set(options.retry.errorCodes)];\n }\n else if (is_1.default.number(retry)) {\n options.retry.limit = retry;\n }\n if (is_1.default.undefined(options.retry.maxRetryAfter)) {\n options.retry.maxRetryAfter = Math.min(\n // TypeScript is not smart enough to handle `.filter(x => is.number(x))`.\n // eslint-disable-next-line unicorn/no-fn-reference-in-iterator\n ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number));\n }\n // `options.pagination`\n if (is_1.default.object(options.pagination)) {\n if (defaults) {\n options.pagination = {\n ...defaults.pagination,\n ...options.pagination\n };\n }\n const { pagination } = options;\n if (!is_1.default.function_(pagination.transform)) {\n throw new Error('`options.pagination.transform` must be implemented');\n }\n if (!is_1.default.function_(pagination.shouldContinue)) {\n throw new Error('`options.pagination.shouldContinue` must be implemented');\n }\n if (!is_1.default.function_(pagination.filter)) {\n throw new TypeError('`options.pagination.filter` must be implemented');\n }\n if (!is_1.default.function_(pagination.paginate)) {\n throw new Error('`options.pagination.paginate` must be implemented');\n }\n }\n // JSON mode\n if (options.responseType === 'json' && options.headers.accept === undefined) {\n options.headers.accept = 'application/json';\n }\n return options;\n};\nexports.default = normalizeArguments;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst types_1 = require(\"./types\");\nconst parseBody = (response, responseType, parseJson, encoding) => {\n const { rawBody } = response;\n try {\n if (responseType === 'text') {\n return rawBody.toString(encoding);\n }\n if (responseType === 'json') {\n return rawBody.length === 0 ? '' : parseJson(rawBody.toString());\n }\n if (responseType === 'buffer') {\n return rawBody;\n }\n throw new types_1.ParseError({\n message: `Unknown body type '${responseType}'`,\n name: 'Error'\n }, response);\n }\n catch (error) {\n throw new types_1.ParseError(error, response);\n }\n};\nexports.default = parseBody;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CancelError = exports.ParseError = void 0;\nconst core_1 = require(\"../core\");\n/**\nAn error to be thrown when server response code is 2xx, and parsing body fails.\nIncludes a `response` property.\n*/\nclass ParseError extends core_1.RequestError {\n constructor(error, response) {\n const { options } = response.request;\n super(`${error.message} in \"${options.url.toString()}\"`, error, response.request);\n this.name = 'ParseError';\n }\n}\nexports.ParseError = ParseError;\n/**\nAn error to be thrown when the request is aborted with `.cancel()`.\n*/\nclass CancelError extends core_1.RequestError {\n constructor(request) {\n super('Promise was canceled', {}, request);\n this.name = 'CancelError';\n }\n get isCanceled() {\n return true;\n }\n}\nexports.CancelError = CancelError;\n__exportStar(require(\"../core\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.retryAfterStatusCodes = void 0;\nexports.retryAfterStatusCodes = new Set([413, 429, 503]);\nconst calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => {\n if (attemptCount > retryOptions.limit) {\n return 0;\n }\n const hasMethod = retryOptions.methods.includes(error.options.method);\n const hasErrorCode = retryOptions.errorCodes.includes(error.code);\n const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode);\n if (!hasMethod || (!hasErrorCode && !hasStatusCode)) {\n return 0;\n }\n if (error.response) {\n if (retryAfter) {\n if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) {\n return 0;\n }\n return retryAfter;\n }\n if (error.response.statusCode === 413) {\n return 0;\n }\n }\n const noise = Math.random() * 100;\n return ((2 ** (attemptCount - 1)) * 1000) + noise;\n};\nexports.default = calculateRetryDelay;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0;\nconst util_1 = require(\"util\");\nconst stream_1 = require(\"stream\");\nconst fs_1 = require(\"fs\");\nconst url_1 = require(\"url\");\nconst http = require(\"http\");\nconst http_1 = require(\"http\");\nconst https = require(\"https\");\nconst http_timer_1 = require(\"@szmarczak/http-timer\");\nconst cacheable_lookup_1 = require(\"cacheable-lookup\");\nconst CacheableRequest = require(\"cacheable-request\");\nconst decompressResponse = require(\"decompress-response\");\n// @ts-expect-error Missing types\nconst http2wrapper = require(\"http2-wrapper\");\nconst lowercaseKeys = require(\"lowercase-keys\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst get_body_size_1 = require(\"./utils/get-body-size\");\nconst is_form_data_1 = 
require(\"./utils/is-form-data\");\nconst proxy_events_1 = require(\"./utils/proxy-events\");\nconst timed_out_1 = require(\"./utils/timed-out\");\nconst url_to_options_1 = require(\"./utils/url-to-options\");\nconst options_to_url_1 = require(\"./utils/options-to-url\");\nconst weakable_map_1 = require(\"./utils/weakable-map\");\nconst get_buffer_1 = require(\"./utils/get-buffer\");\nconst dns_ip_version_1 = require(\"./utils/dns-ip-version\");\nconst is_response_ok_1 = require(\"./utils/is-response-ok\");\nconst deprecation_warning_1 = require(\"../utils/deprecation-warning\");\nconst normalize_arguments_1 = require(\"../as-promise/normalize-arguments\");\nconst calculate_retry_delay_1 = require(\"./calculate-retry-delay\");\nconst globalDnsCache = new cacheable_lookup_1.default();\nconst kRequest = Symbol('request');\nconst kResponse = Symbol('response');\nconst kResponseSize = Symbol('responseSize');\nconst kDownloadedSize = Symbol('downloadedSize');\nconst kBodySize = Symbol('bodySize');\nconst kUploadedSize = Symbol('uploadedSize');\nconst kServerResponsesPiped = Symbol('serverResponsesPiped');\nconst kUnproxyEvents = Symbol('unproxyEvents');\nconst kIsFromCache = Symbol('isFromCache');\nconst kCancelTimeouts = Symbol('cancelTimeouts');\nconst kStartedReading = Symbol('startedReading');\nconst kStopReading = Symbol('stopReading');\nconst kTriggerRead = Symbol('triggerRead');\nconst kBody = Symbol('body');\nconst kJobs = Symbol('jobs');\nconst kOriginalResponse = Symbol('originalResponse');\nconst kRetryTimeout = Symbol('retryTimeout');\nexports.kIsNormalizedAlready = Symbol('isNormalizedAlready');\nconst supportsBrotli = is_1.default.string(process.versions.brotli);\nexports.withoutBody = new Set(['GET', 'HEAD']);\nexports.knownHookEvents = [\n 'init',\n 'beforeRequest',\n 'beforeRedirect',\n 'beforeError',\n 'beforeRetry',\n // Promise-Only\n 'afterResponse'\n];\nfunction validateSearchParameters(searchParameters) {\n // eslint-disable-next-line guard-for-in\n for (const key in searchParameters) {\n const value = searchParameters[key];\n if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && !is_1.default.null_(value) && !is_1.default.undefined(value)) {\n throw new TypeError(`The \\`searchParams\\` value '${String(value)}' must be a string, number, boolean or null`);\n }\n }\n}\nfunction isClientRequest(clientRequest) {\n return is_1.default.object(clientRequest) && !('statusCode' in clientRequest);\n}\nconst cacheableStore = new weakable_map_1.default();\nconst waitForOpenFile = async (file) => new Promise((resolve, reject) => {\n const onError = (error) => {\n reject(error);\n };\n // Node.js 12 has incomplete types\n if (!file.pending) {\n resolve();\n }\n file.once('error', onError);\n file.once('ready', () => {\n file.off('error', onError);\n resolve();\n });\n});\nconst redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]);\nconst nonEnumerableProperties = [\n 'context',\n 'body',\n 'json',\n 'form'\n];\nexports.setNonEnumerableProperties = (sources, to) => {\n // Non enumerable properties shall not be merged\n const properties = {};\n for (const source of sources) {\n if (!source) {\n continue;\n }\n for (const name of nonEnumerableProperties) {\n if (!(name in source)) {\n continue;\n }\n properties[name] = {\n writable: true,\n configurable: true,\n enumerable: false,\n // @ts-expect-error TS doesn't see the check above\n value: source[name]\n };\n }\n }\n Object.defineProperties(to, properties);\n};\n/**\nAn error to be 
thrown when a request fails.\nContains a `code` property with error class code, like `ECONNREFUSED`.\n*/\nclass RequestError extends Error {\n constructor(message, error, self) {\n var _a;\n super(message);\n Error.captureStackTrace(this, this.constructor);\n this.name = 'RequestError';\n this.code = error.code;\n if (self instanceof Request) {\n Object.defineProperty(this, 'request', {\n enumerable: false,\n value: self\n });\n Object.defineProperty(this, 'response', {\n enumerable: false,\n value: self[kResponse]\n });\n Object.defineProperty(this, 'options', {\n // This fails because of TS 3.7.2 useDefineForClassFields\n // Ref: https://github.com/microsoft/TypeScript/issues/34972\n enumerable: false,\n value: self.options\n });\n }\n else {\n Object.defineProperty(this, 'options', {\n // This fails because of TS 3.7.2 useDefineForClassFields\n // Ref: https://github.com/microsoft/TypeScript/issues/34972\n enumerable: false,\n value: self\n });\n }\n this.timings = (_a = this.request) === null || _a === void 0 ? void 0 : _a.timings;\n // Recover the original stacktrace\n if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) {\n const indexOfMessage = this.stack.indexOf(this.message) + this.message.length;\n const thisStackTrace = this.stack.slice(indexOfMessage).split('\\n').reverse();\n const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\\n').reverse();\n // Remove duplicated traces\n while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) {\n thisStackTrace.shift();\n }\n this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\\n')}${errorStackTrace.reverse().join('\\n')}`;\n }\n }\n}\nexports.RequestError = RequestError;\n/**\nAn error to be thrown when the server redirects you more than ten times.\nIncludes a `response` property.\n*/\nclass MaxRedirectsError extends RequestError {\n constructor(request) {\n super(`Redirected ${request.options.maxRedirects} times. 
Aborting.`, {}, request);\n this.name = 'MaxRedirectsError';\n }\n}\nexports.MaxRedirectsError = MaxRedirectsError;\n/**\nAn error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304.\nIncludes a `response` property.\n*/\nclass HTTPError extends RequestError {\n constructor(response) {\n super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request);\n this.name = 'HTTPError';\n }\n}\nexports.HTTPError = HTTPError;\n/**\nAn error to be thrown when a cache method fails.\nFor example, if the database goes down or there's a filesystem error.\n*/\nclass CacheError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'CacheError';\n }\n}\nexports.CacheError = CacheError;\n/**\nAn error to be thrown when the request body is a stream and an error occurs while reading from that stream.\n*/\nclass UploadError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'UploadError';\n }\n}\nexports.UploadError = UploadError;\n/**\nAn error to be thrown when the request is aborted due to a timeout.\nIncludes an `event` and `timings` property.\n*/\nclass TimeoutError extends RequestError {\n constructor(error, timings, request) {\n super(error.message, error, request);\n this.name = 'TimeoutError';\n this.event = error.event;\n this.timings = timings;\n }\n}\nexports.TimeoutError = TimeoutError;\n/**\nAn error to be thrown when reading from response stream fails.\n*/\nclass ReadError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'ReadError';\n }\n}\nexports.ReadError = ReadError;\n/**\nAn error to be thrown when given an unsupported protocol.\n*/\nclass UnsupportedProtocolError extends RequestError {\n constructor(options) {\n super(`Unsupported protocol \"${options.url.protocol}\"`, {}, options);\n this.name = 'UnsupportedProtocolError';\n }\n}\nexports.UnsupportedProtocolError = UnsupportedProtocolError;\nconst proxiedRequestEvents = [\n 'socket',\n 'connect',\n 'continue',\n 'information',\n 'upgrade',\n 'timeout'\n];\nclass Request extends stream_1.Duplex {\n constructor(url, options = {}, defaults) {\n super({\n // This must be false, to enable throwing after destroy\n // It is used for retry logic in Promise API\n autoDestroy: false,\n // It needs to be zero because we're just proxying the data to another stream\n highWaterMark: 0\n });\n this[kDownloadedSize] = 0;\n this[kUploadedSize] = 0;\n this.requestInitialized = false;\n this[kServerResponsesPiped] = new Set();\n this.redirects = [];\n this[kStopReading] = false;\n this[kTriggerRead] = false;\n this[kJobs] = [];\n this.retryCount = 0;\n // TODO: Remove this when targeting Node.js >= 12\n this._progressCallbacks = [];\n const unlockWrite = () => this._unlockWrite();\n const lockWrite = () => this._lockWrite();\n this.on('pipe', (source) => {\n source.prependListener('data', unlockWrite);\n source.on('data', lockWrite);\n source.prependListener('end', unlockWrite);\n source.on('end', lockWrite);\n });\n this.on('unpipe', (source) => {\n source.off('data', unlockWrite);\n source.off('data', lockWrite);\n source.off('end', unlockWrite);\n source.off('end', lockWrite);\n });\n this.on('pipe', source => {\n if (source instanceof http_1.IncomingMessage) {\n this.options.headers = {\n ...source.headers,\n ...this.options.headers\n };\n }\n });\n const { json, body, form } = 
options;\n if (json || body || form) {\n this._lockWrite();\n }\n if (exports.kIsNormalizedAlready in options) {\n this.options = options;\n }\n else {\n try {\n // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible\n this.options = this.constructor.normalizeArguments(url, options, defaults);\n }\n catch (error) {\n // TODO: Move this to `_destroy()`\n if (is_1.default.nodeStream(options.body)) {\n options.body.destroy();\n }\n this.destroy(error);\n return;\n }\n }\n (async () => {\n var _a;\n try {\n if (this.options.body instanceof fs_1.ReadStream) {\n await waitForOpenFile(this.options.body);\n }\n const { url: normalizedURL } = this.options;\n if (!normalizedURL) {\n throw new TypeError('Missing `url` property');\n }\n this.requestUrl = normalizedURL.toString();\n decodeURI(this.requestUrl);\n await this._finalizeBody();\n await this._makeRequest();\n if (this.destroyed) {\n (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy();\n return;\n }\n // Queued writes etc.\n for (const job of this[kJobs]) {\n job();\n }\n // Prevent memory leak\n this[kJobs].length = 0;\n this.requestInitialized = true;\n }\n catch (error) {\n if (error instanceof RequestError) {\n this._beforeError(error);\n return;\n }\n // This is a workaround for https://github.com/nodejs/node/issues/33335\n if (!this.destroyed) {\n this.destroy(error);\n }\n }\n })();\n }\n static normalizeArguments(url, options, defaults) {\n var _a, _b, _c, _d, _e;\n const rawOptions = options;\n if (is_1.default.object(url) && !is_1.default.urlInstance(url)) {\n options = { ...defaults, ...url, ...options };\n }\n else {\n if (url && options && options.url !== undefined) {\n throw new TypeError('The `url` option is mutually exclusive with the `input` argument');\n }\n options = { ...defaults, ...options };\n if (url !== undefined) {\n options.url = url;\n }\n if (is_1.default.urlInstance(options.url)) {\n options.url = new url_1.URL(options.url.toString());\n }\n }\n // TODO: Deprecate URL options in Got 12.\n // Support extend-specific options\n if (options.cache === false) {\n options.cache = undefined;\n }\n if (options.dnsCache === false) {\n options.dnsCache = undefined;\n }\n // Nice type assertions\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers);\n is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar);\n is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams);\n is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache);\n is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect);\n is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors);\n 
is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress);\n is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized);\n if (options.https) {\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized);\n is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase);\n is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx);\n }\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions);\n // `options.method`\n if (is_1.default.string(options.method)) {\n options.method = options.method.toUpperCase();\n }\n else {\n options.method = 'GET';\n }\n // `options.headers`\n if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) {\n options.headers = { ...options.headers };\n }\n else {\n options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? void 0 : defaults.headers), ...options.headers });\n }\n // Disallow legacy `url.Url`\n if ('slashes' in options) {\n throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.');\n }\n // `options.auth`\n if ('auth' in options) {\n throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.');\n }\n // `options.searchParams`\n if ('searchParams' in options) {\n if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) {\n let searchParameters;\n if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) {\n searchParameters = new url_1.URLSearchParams(options.searchParams);\n }\n else {\n validateSearchParameters(options.searchParams);\n searchParameters = new url_1.URLSearchParams();\n // eslint-disable-next-line guard-for-in\n for (const key in options.searchParams) {\n const value = options.searchParams[key];\n if (value === null) {\n searchParameters.append(key, '');\n }\n else if (value !== undefined) {\n searchParameters.append(key, value);\n }\n }\n }\n // `normalizeArguments()` is also used to merge options\n (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? 
void 0 : _a.forEach((value, key) => {\n // Only use default if one isn't already defined\n if (!searchParameters.has(key)) {\n searchParameters.append(key, value);\n }\n });\n options.searchParams = searchParameters;\n }\n }\n // `options.username` & `options.password`\n options.username = (_b = options.username) !== null && _b !== void 0 ? _b : '';\n options.password = (_c = options.password) !== null && _c !== void 0 ? _c : '';\n // `options.prefixUrl` & `options.url`\n if (is_1.default.undefined(options.prefixUrl)) {\n options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? _d : '';\n }\n else {\n options.prefixUrl = options.prefixUrl.toString();\n if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) {\n options.prefixUrl += '/';\n }\n }\n if (is_1.default.string(options.url)) {\n if (options.url.startsWith('/')) {\n throw new Error('`input` must not start with a slash when using `prefixUrl`');\n }\n options.url = options_to_url_1.default(options.prefixUrl + options.url, options);\n }\n else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) {\n options.url = options_to_url_1.default(options.prefixUrl, options);\n }\n if (options.url) {\n if ('port' in options) {\n delete options.port;\n }\n // Make it possible to change `options.prefixUrl`\n let { prefixUrl } = options;\n Object.defineProperty(options, 'prefixUrl', {\n set: (value) => {\n const url = options.url;\n if (!url.href.startsWith(value)) {\n throw new Error(`Cannot change \\`prefixUrl\\` from ${prefixUrl} to ${value}: ${url.href}`);\n }\n options.url = new url_1.URL(value + url.href.slice(prefixUrl.length));\n prefixUrl = value;\n },\n get: () => prefixUrl\n });\n // Support UNIX sockets\n let { protocol } = options.url;\n if (protocol === 'unix:') {\n protocol = 'http:';\n options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`);\n }\n // Set search params\n if (options.searchParams) {\n // eslint-disable-next-line @typescript-eslint/no-base-to-string\n options.url.search = options.searchParams.toString();\n }\n // Protocol check\n if (protocol !== 'http:' && protocol !== 'https:') {\n throw new UnsupportedProtocolError(options);\n }\n // Update `username`\n if (options.username === '') {\n options.username = options.url.username;\n }\n else {\n options.url.username = options.username;\n }\n // Update `password`\n if (options.password === '') {\n options.password = options.url.password;\n }\n else {\n options.url.password = options.password;\n }\n }\n // `options.cookieJar`\n const { cookieJar } = options;\n if (cookieJar) {\n let { setCookie, getCookieString } = cookieJar;\n is_1.assert.function_(setCookie);\n is_1.assert.function_(getCookieString);\n /* istanbul ignore next: Horrible `tough-cookie` v3 check */\n if (setCookie.length === 4 && getCookieString.length === 0) {\n setCookie = util_1.promisify(setCookie.bind(options.cookieJar));\n getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar));\n options.cookieJar = {\n setCookie,\n getCookieString: getCookieString\n };\n }\n }\n // `options.cache`\n const { cache } = options;\n if (cache) {\n if (!cacheableStore.has(cache)) {\n cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => {\n const result = requestOptions[kRequest](requestOptions, handler);\n // TODO: remove this when `cacheable-request` supports async request functions.\n if (is_1.default.promise(result)) {\n // 
@ts-expect-error\n // We only need to implement the error handler in order to support HTTP2 caching.\n // The result will be a promise anyway.\n result.once = (event, handler) => {\n if (event === 'error') {\n result.catch(handler);\n }\n else if (event === 'abort') {\n // The empty catch is needed here in case when\n // it rejects before it's `await`ed in `_makeRequest`.\n (async () => {\n try {\n const request = (await result);\n request.once('abort', handler);\n }\n catch (_a) { }\n })();\n }\n else {\n /* istanbul ignore next: safety check */\n throw new Error(`Unknown HTTP2 promise event: ${event}`);\n }\n return result;\n };\n }\n return result;\n }), cache));\n }\n }\n // `options.cacheOptions`\n options.cacheOptions = { ...options.cacheOptions };\n // `options.dnsCache`\n if (options.dnsCache === true) {\n options.dnsCache = globalDnsCache;\n }\n else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) {\n throw new TypeError(`Parameter \\`dnsCache\\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`);\n }\n // `options.timeout`\n if (is_1.default.number(options.timeout)) {\n options.timeout = { request: options.timeout };\n }\n else if (defaults && options.timeout !== defaults.timeout) {\n options.timeout = {\n ...defaults.timeout,\n ...options.timeout\n };\n }\n else {\n options.timeout = { ...options.timeout };\n }\n // `options.context`\n if (!options.context) {\n options.context = {};\n }\n // `options.hooks`\n const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks);\n options.hooks = { ...options.hooks };\n for (const event of exports.knownHookEvents) {\n if (event in options.hooks) {\n if (is_1.default.array(options.hooks[event])) {\n // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044\n options.hooks[event] = [...options.hooks[event]];\n }\n else {\n throw new TypeError(`Parameter \\`${event}\\` must be an Array, got ${is_1.default(options.hooks[event])}`);\n }\n }\n else {\n options.hooks[event] = [];\n }\n }\n if (defaults && !areHooksDefault) {\n for (const event of exports.knownHookEvents) {\n const defaultHooks = defaults.hooks[event];\n if (defaultHooks.length > 0) {\n // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044\n options.hooks[event] = [\n ...defaults.hooks[event],\n ...options.hooks[event]\n ];\n }\n }\n }\n // DNS options\n if ('family' in options) {\n deprecation_warning_1.default('\"options.family\" was never documented, please use \"options.dnsLookupIpVersion\"');\n }\n // HTTPS options\n if (defaults === null || defaults === void 0 ? 
void 0 : defaults.https) {\n options.https = { ...defaults.https, ...options.https };\n }\n if ('rejectUnauthorized' in options) {\n deprecation_warning_1.default('\"options.rejectUnauthorized\" is now deprecated, please use \"options.https.rejectUnauthorized\"');\n }\n if ('checkServerIdentity' in options) {\n deprecation_warning_1.default('\"options.checkServerIdentity\" was never documented, please use \"options.https.checkServerIdentity\"');\n }\n if ('ca' in options) {\n deprecation_warning_1.default('\"options.ca\" was never documented, please use \"options.https.certificateAuthority\"');\n }\n if ('key' in options) {\n deprecation_warning_1.default('\"options.key\" was never documented, please use \"options.https.key\"');\n }\n if ('cert' in options) {\n deprecation_warning_1.default('\"options.cert\" was never documented, please use \"options.https.certificate\"');\n }\n if ('passphrase' in options) {\n deprecation_warning_1.default('\"options.passphrase\" was never documented, please use \"options.https.passphrase\"');\n }\n if ('pfx' in options) {\n deprecation_warning_1.default('\"options.pfx\" was never documented, please use \"options.https.pfx\"');\n }\n // Other options\n if ('followRedirects' in options) {\n throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');\n }\n if (options.agent) {\n for (const key in options.agent) {\n if (key !== 'http' && key !== 'https' && key !== 'http2') {\n throw new TypeError(`Expected the \\`options.agent\\` properties to be \\`http\\`, \\`https\\` or \\`http2\\`, got \\`${key}\\``);\n }\n }\n }\n options.maxRedirects = (_e = options.maxRedirects) !== null && _e !== void 0 ? _e : 0;\n // Set non-enumerable properties\n exports.setNonEnumerableProperties([defaults, rawOptions], options);\n return normalize_arguments_1.default(options, defaults);\n }\n _lockWrite() {\n const onLockedWrite = () => {\n throw new TypeError('The payload has been already provided');\n };\n this.write = onLockedWrite;\n this.end = onLockedWrite;\n }\n _unlockWrite() {\n this.write = super.write;\n this.end = super.end;\n }\n async _finalizeBody() {\n const { options } = this;\n const { headers } = options;\n const isForm = !is_1.default.undefined(options.form);\n const isJSON = !is_1.default.undefined(options.json);\n const isBody = !is_1.default.undefined(options.body);\n const hasPayload = isForm || isJSON || isBody;\n const cannotHaveBody = exports.withoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody);\n this._cannotHaveBody = cannotHaveBody;\n if (hasPayload) {\n if (cannotHaveBody) {\n throw new TypeError(`The \\`${options.method}\\` method cannot be used with a body`);\n }\n if ([isBody, isForm, isJSON].filter(isTrue => isTrue).length > 1) {\n throw new TypeError('The `body`, `json` and `form` options are mutually exclusive');\n }\n if (isBody &&\n !(options.body instanceof stream_1.Readable) &&\n !is_1.default.string(options.body) &&\n !is_1.default.buffer(options.body) &&\n !is_form_data_1.default(options.body)) {\n throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');\n }\n if (isForm && !is_1.default.object(options.form)) {\n throw new TypeError('The `form` option must be an Object');\n }\n {\n // Serialize body\n const noContentType = !is_1.default.string(headers['content-type']);\n if (isBody) {\n // Special case for https://github.com/form-data/form-data\n if (is_form_data_1.default(options.body) && noContentType) {\n headers['content-type'] = 
`multipart/form-data; boundary=${options.body.getBoundary()}`;\n }\n this[kBody] = options.body;\n }\n else if (isForm) {\n if (noContentType) {\n headers['content-type'] = 'application/x-www-form-urlencoded';\n }\n this[kBody] = (new url_1.URLSearchParams(options.form)).toString();\n }\n else {\n if (noContentType) {\n headers['content-type'] = 'application/json';\n }\n this[kBody] = options.stringifyJson(options.json);\n }\n const uploadBodySize = await get_body_size_1.default(this[kBody], options.headers);\n // See https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD send a Content-Length in a request message when\n // no Transfer-Encoding is sent and the request method defines a meaning\n // for an enclosed payload body. For example, a Content-Length header\n // field is normally sent in a POST request even when the value is 0\n // (indicating an empty payload body). A user agent SHOULD NOT send a\n // Content-Length header field when the request message does not contain\n // a payload body and the method semantics do not anticipate such a\n // body.\n if (is_1.default.undefined(headers['content-length']) && is_1.default.undefined(headers['transfer-encoding'])) {\n if (!cannotHaveBody && !is_1.default.undefined(uploadBodySize)) {\n headers['content-length'] = String(uploadBodySize);\n }\n }\n }\n }\n else if (cannotHaveBody) {\n this._lockWrite();\n }\n else {\n this._unlockWrite();\n }\n this[kBodySize] = Number(headers['content-length']) || undefined;\n }\n async _onResponseBase(response) {\n const { options } = this;\n const { url } = options;\n this[kOriginalResponse] = response;\n if (options.decompress) {\n response = decompressResponse(response);\n }\n const statusCode = response.statusCode;\n const typedResponse = response;\n typedResponse.statusMessage = typedResponse.statusMessage ? 
typedResponse.statusMessage : http.STATUS_CODES[statusCode];\n typedResponse.url = options.url.toString();\n typedResponse.requestUrl = this.requestUrl;\n typedResponse.redirectUrls = this.redirects;\n typedResponse.request = this;\n typedResponse.isFromCache = response.fromCache || false;\n typedResponse.ip = this.ip;\n typedResponse.retryCount = this.retryCount;\n this[kIsFromCache] = typedResponse.isFromCache;\n this[kResponseSize] = Number(response.headers['content-length']) || undefined;\n this[kResponse] = response;\n response.once('end', () => {\n this[kResponseSize] = this[kDownloadedSize];\n this.emit('downloadProgress', this.downloadProgress);\n });\n response.once('error', (error) => {\n // Force clean-up, because some packages don't do this.\n // TODO: Fix decompress-response\n response.destroy();\n this._beforeError(new ReadError(error, this));\n });\n response.once('aborted', () => {\n this._beforeError(new ReadError({\n name: 'Error',\n message: 'The server aborted pending request',\n code: 'ECONNRESET'\n }, this));\n });\n this.emit('downloadProgress', this.downloadProgress);\n const rawCookies = response.headers['set-cookie'];\n if (is_1.default.object(options.cookieJar) && rawCookies) {\n let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString()));\n if (options.ignoreInvalidCookies) {\n promises = promises.map(async (p) => p.catch(() => { }));\n }\n try {\n await Promise.all(promises);\n }\n catch (error) {\n this._beforeError(error);\n return;\n }\n }\n if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) {\n // We're being redirected, we don't care about the response.\n // It'd be best to abort the request, but we can't because\n // we would have to sacrifice the TCP connection. We don't want that.\n response.resume();\n if (this[kRequest]) {\n this[kCancelTimeouts]();\n // eslint-disable-next-line @typescript-eslint/no-dynamic-delete\n delete this[kRequest];\n this[kUnproxyEvents]();\n }\n const shouldBeGet = statusCode === 303 && options.method !== 'GET' && options.method !== 'HEAD';\n if (shouldBeGet || !options.methodRewriting) {\n // Server responded with \"see other\", indicating that the resource exists at another location,\n // and the client should request it from that location via GET or HEAD.\n options.method = 'GET';\n if ('body' in options) {\n delete options.body;\n }\n if ('json' in options) {\n delete options.json;\n }\n if ('form' in options) {\n delete options.form;\n }\n this[kBody] = undefined;\n delete options.headers['content-length'];\n }\n if (this.redirects.length >= options.maxRedirects) {\n this._beforeError(new MaxRedirectsError(this));\n return;\n }\n try {\n // Do not remove. See https://github.com/sindresorhus/got/pull/214\n const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();\n // Handles invalid URLs. 
See https://github.com/sindresorhus/got/issues/604\n const redirectUrl = new url_1.URL(redirectBuffer, url);\n const redirectString = redirectUrl.toString();\n decodeURI(redirectString);\n // Redirecting to a different site, clear sensitive data.\n if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) {\n if ('host' in options.headers) {\n delete options.headers.host;\n }\n if ('cookie' in options.headers) {\n delete options.headers.cookie;\n }\n if ('authorization' in options.headers) {\n delete options.headers.authorization;\n }\n if (options.username || options.password) {\n options.username = '';\n options.password = '';\n }\n }\n else {\n redirectUrl.username = options.username;\n redirectUrl.password = options.password;\n }\n this.redirects.push(redirectString);\n options.url = redirectUrl;\n for (const hook of options.hooks.beforeRedirect) {\n // eslint-disable-next-line no-await-in-loop\n await hook(options, typedResponse);\n }\n this.emit('redirect', typedResponse, options);\n await this._makeRequest();\n }\n catch (error) {\n this._beforeError(error);\n return;\n }\n return;\n }\n if (options.isStream && options.throwHttpErrors && !is_response_ok_1.isResponseOk(typedResponse)) {\n this._beforeError(new HTTPError(typedResponse));\n return;\n }\n response.on('readable', () => {\n if (this[kTriggerRead]) {\n this._read();\n }\n });\n this.on('resume', () => {\n response.resume();\n });\n this.on('pause', () => {\n response.pause();\n });\n response.once('end', () => {\n this.push(null);\n });\n this.emit('response', response);\n for (const destination of this[kServerResponsesPiped]) {\n if (destination.headersSent) {\n continue;\n }\n // eslint-disable-next-line guard-for-in\n for (const key in response.headers) {\n const isAllowed = options.decompress ? key !== 'content-encoding' : true;\n const value = response.headers[key];\n if (isAllowed) {\n destination.setHeader(key, value);\n }\n }\n destination.statusCode = statusCode;\n }\n }\n async _onResponse(response) {\n try {\n await this._onResponseBase(response);\n }\n catch (error) {\n /* istanbul ignore next: better safe than sorry */\n this._beforeError(error);\n }\n }\n _onRequest(request) {\n const { options } = this;\n const { timeout, url } = options;\n http_timer_1.default(request);\n this[kCancelTimeouts] = timed_out_1.default(request, timeout, url);\n const responseEventName = options.cache ? 'cacheableResponse' : 'response';\n request.once(responseEventName, (response) => {\n void this._onResponse(response);\n });\n request.once('error', (error) => {\n var _a;\n // Force clean-up, because some packages (e.g. nock) don't do this.\n request.destroy();\n // Node.js <= 12.18.2 mistakenly emits the response `end` first.\n (_a = request.res) === null || _a === void 0 ? void 0 : _a.removeAllListeners('end');\n error = error instanceof timed_out_1.TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this);\n this._beforeError(error);\n });\n this[kUnproxyEvents] = proxy_events_1.default(request, this, proxiedRequestEvents);\n this[kRequest] = request;\n this.emit('uploadProgress', this.uploadProgress);\n // Send body\n const body = this[kBody];\n const currentRequest = this.redirects.length === 0 ? 
this : request;\n if (is_1.default.nodeStream(body)) {\n body.pipe(currentRequest);\n body.once('error', (error) => {\n this._beforeError(new UploadError(error, this));\n });\n }\n else {\n this._unlockWrite();\n if (!is_1.default.undefined(body)) {\n this._writeRequest(body, undefined, () => { });\n currentRequest.end();\n this._lockWrite();\n }\n else if (this._cannotHaveBody || this._noPipe) {\n currentRequest.end();\n this._lockWrite();\n }\n }\n this.emit('request', request);\n }\n async _createCacheableRequest(url, options) {\n return new Promise((resolve, reject) => {\n // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed\n Object.assign(options, url_to_options_1.default(url));\n // `http-cache-semantics` checks this\n // TODO: Fix this ignore.\n // @ts-expect-error\n delete options.url;\n let request;\n // This is ugly\n const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => {\n // TODO: Fix `cacheable-response`\n response._readableState.autoDestroy = false;\n if (request) {\n (await request).emit('cacheableResponse', response);\n }\n resolve(response);\n });\n // Restore options\n options.url = url;\n cacheRequest.once('error', reject);\n cacheRequest.once('request', async (requestOrPromise) => {\n request = requestOrPromise;\n resolve(request);\n });\n });\n }\n async _makeRequest() {\n var _a, _b, _c, _d, _e;\n const { options } = this;\n const { headers } = options;\n for (const key in headers) {\n if (is_1.default.undefined(headers[key])) {\n // eslint-disable-next-line @typescript-eslint/no-dynamic-delete\n delete headers[key];\n }\n else if (is_1.default.null_(headers[key])) {\n throw new TypeError(`Use \\`undefined\\` instead of \\`null\\` to delete the \\`${key}\\` header`);\n }\n }\n if (options.decompress && is_1.default.undefined(headers['accept-encoding'])) {\n headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate';\n }\n // Set cookies\n if (options.cookieJar) {\n const cookieString = await options.cookieJar.getCookieString(options.url.toString());\n if (is_1.default.nonEmptyString(cookieString)) {\n options.headers.cookie = cookieString;\n }\n }\n for (const hook of options.hooks.beforeRequest) {\n // eslint-disable-next-line no-await-in-loop\n const result = await hook(options);\n if (!is_1.default.undefined(result)) {\n // @ts-expect-error Skip the type mismatch to support abstract responses\n options.request = () => result;\n break;\n }\n }\n if (options.body && this[kBody] !== options.body) {\n this[kBody] = options.body;\n }\n const { agent, request, timeout, url } = options;\n if (options.dnsCache && !('lookup' in options)) {\n options.lookup = options.dnsCache.lookup;\n }\n // UNIX sockets\n if (url.hostname === 'unix') {\n const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`);\n if (matches === null || matches === void 0 ? void 0 : matches.groups) {\n const { socketPath, path } = matches.groups;\n Object.assign(options, {\n socketPath,\n path,\n host: ''\n });\n }\n }\n const isHttps = url.protocol === 'https:';\n // Fallback function\n let fallbackFn;\n if (options.http2) {\n fallbackFn = http2wrapper.auto;\n }\n else {\n fallbackFn = isHttps ? https.request : http.request;\n }\n const realFn = (_a = options.request) !== null && _a !== void 0 ? _a : fallbackFn;\n // Cache support\n const fn = options.cache ? this._createCacheableRequest : realFn;\n // Pass an agent directly when HTTP2 is disabled\n if (agent && !options.http2) {\n options.agent = agent[isHttps ?
'https' : 'http'];\n }\n // Prepare plain HTTP request options\n options[kRequest] = realFn;\n delete options.request;\n // TODO: Fix this ignore.\n // @ts-expect-error\n delete options.timeout;\n const requestOptions = options;\n requestOptions.shared = (_b = options.cacheOptions) === null || _b === void 0 ? void 0 : _b.shared;\n requestOptions.cacheHeuristic = (_c = options.cacheOptions) === null || _c === void 0 ? void 0 : _c.cacheHeuristic;\n requestOptions.immutableMinTimeToLive = (_d = options.cacheOptions) === null || _d === void 0 ? void 0 : _d.immutableMinTimeToLive;\n requestOptions.ignoreCargoCult = (_e = options.cacheOptions) === null || _e === void 0 ? void 0 : _e.ignoreCargoCult;\n // If `dnsLookupIpVersion` is not present do not override `family`\n if (options.dnsLookupIpVersion !== undefined) {\n try {\n requestOptions.family = dns_ip_version_1.dnsLookupIpVersionToFamily(options.dnsLookupIpVersion);\n }\n catch (_f) {\n throw new Error('Invalid `dnsLookupIpVersion` option value');\n }\n }\n // HTTPS options remapping\n if (options.https) {\n if ('rejectUnauthorized' in options.https) {\n requestOptions.rejectUnauthorized = options.https.rejectUnauthorized;\n }\n if (options.https.checkServerIdentity) {\n requestOptions.checkServerIdentity = options.https.checkServerIdentity;\n }\n if (options.https.certificateAuthority) {\n requestOptions.ca = options.https.certificateAuthority;\n }\n if (options.https.certificate) {\n requestOptions.cert = options.https.certificate;\n }\n if (options.https.key) {\n requestOptions.key = options.https.key;\n }\n if (options.https.passphrase) {\n requestOptions.passphrase = options.https.passphrase;\n }\n if (options.https.pfx) {\n requestOptions.pfx = options.https.pfx;\n }\n }\n try {\n let requestOrResponse = await fn(url, requestOptions);\n if (is_1.default.undefined(requestOrResponse)) {\n requestOrResponse = fallbackFn(url, requestOptions);\n }\n // Restore options\n options.request = request;\n options.timeout = timeout;\n options.agent = agent;\n // HTTPS options restore\n if (options.https) {\n if ('rejectUnauthorized' in options.https) {\n delete requestOptions.rejectUnauthorized;\n }\n if (options.https.checkServerIdentity) {\n // @ts-expect-error - This one will be removed when we remove the alias.\n delete requestOptions.checkServerIdentity;\n }\n if (options.https.certificateAuthority) {\n delete requestOptions.ca;\n }\n if (options.https.certificate) {\n delete requestOptions.cert;\n }\n if (options.https.key) {\n delete requestOptions.key;\n }\n if (options.https.passphrase) {\n delete requestOptions.passphrase;\n }\n if (options.https.pfx) {\n delete requestOptions.pfx;\n }\n }\n if (isClientRequest(requestOrResponse)) {\n this._onRequest(requestOrResponse);\n // Emit the response after the stream has been ended\n }\n else if (this.writable) {\n this.once('finish', () => {\n void this._onResponse(requestOrResponse);\n });\n this._unlockWrite();\n this.end();\n this._lockWrite();\n }\n else {\n void this._onResponse(requestOrResponse);\n }\n }\n catch (error) {\n if (error instanceof CacheableRequest.CacheError) {\n throw new CacheError(error, this);\n }\n throw new RequestError(error.message, error, this);\n }\n }\n async _error(error) {\n try {\n for (const hook of this.options.hooks.beforeError) {\n // eslint-disable-next-line no-await-in-loop\n error = await hook(error);\n }\n }\n catch (error_) {\n error = new RequestError(error_.message, error_, this);\n }\n this.destroy(error);\n }\n _beforeError(error) {\n if 
(this[kStopReading]) {\n return;\n }\n const { options } = this;\n const retryCount = this.retryCount + 1;\n this[kStopReading] = true;\n if (!(error instanceof RequestError)) {\n error = new RequestError(error.message, error, this);\n }\n const typedError = error;\n const { response } = typedError;\n void (async () => {\n if (response && !response.body) {\n response.setEncoding(this._readableState.encoding);\n try {\n response.rawBody = await get_buffer_1.default(response);\n response.body = response.rawBody.toString();\n }\n catch (_a) { }\n }\n if (this.listenerCount('retry') !== 0) {\n let backoff;\n try {\n let retryAfter;\n if (response && 'retry-after' in response.headers) {\n retryAfter = Number(response.headers['retry-after']);\n if (Number.isNaN(retryAfter)) {\n retryAfter = Date.parse(response.headers['retry-after']) - Date.now();\n if (retryAfter <= 0) {\n retryAfter = 1;\n }\n }\n else {\n retryAfter *= 1000;\n }\n }\n backoff = await options.retry.calculateDelay({\n attemptCount: retryCount,\n retryOptions: options.retry,\n error: typedError,\n retryAfter,\n computedValue: calculate_retry_delay_1.default({\n attemptCount: retryCount,\n retryOptions: options.retry,\n error: typedError,\n retryAfter,\n computedValue: 0\n })\n });\n }\n catch (error_) {\n void this._error(new RequestError(error_.message, error_, this));\n return;\n }\n if (backoff) {\n const retry = async () => {\n try {\n for (const hook of this.options.hooks.beforeRetry) {\n // eslint-disable-next-line no-await-in-loop\n await hook(this.options, typedError, retryCount);\n }\n }\n catch (error_) {\n void this._error(new RequestError(error_.message, error, this));\n return;\n }\n // Something forced us to abort the retry\n if (this.destroyed) {\n return;\n }\n this.destroy();\n this.emit('retry', retryCount, error);\n };\n this[kRetryTimeout] = setTimeout(retry, backoff);\n return;\n }\n }\n void this._error(typedError);\n })();\n }\n _read() {\n this[kTriggerRead] = true;\n const response = this[kResponse];\n if (response && !this[kStopReading]) {\n // We cannot put this in the `if` above\n // because `.read()` also triggers the `end` event\n if (response.readableLength) {\n this[kTriggerRead] = false;\n }\n let data;\n while ((data = response.read()) !== null) {\n this[kDownloadedSize] += data.length;\n this[kStartedReading] = true;\n const progress = this.downloadProgress;\n if (progress.percent < 1) {\n this.emit('downloadProgress', progress);\n }\n this.push(data);\n }\n }\n }\n // Node.js 12 has incorrect types, so the encoding must be a string\n _write(chunk, encoding, callback) {\n const write = () => {\n this._writeRequest(chunk, encoding, callback);\n };\n if (this.requestInitialized) {\n write();\n }\n else {\n this[kJobs].push(write);\n }\n }\n _writeRequest(chunk, encoding, callback) {\n if (this[kRequest].destroyed) {\n // Probably the `ClientRequest` instance will throw\n return;\n }\n this._progressCallbacks.push(() => {\n this[kUploadedSize] += Buffer.byteLength(chunk, encoding);\n const progress = this.uploadProgress;\n if (progress.percent < 1) {\n this.emit('uploadProgress', progress);\n }\n });\n // TODO: What happens if it's from cache? 
Then this[kRequest] won't be defined.\n this[kRequest].write(chunk, encoding, (error) => {\n if (!error && this._progressCallbacks.length > 0) {\n this._progressCallbacks.shift()();\n }\n callback(error);\n });\n }\n _final(callback) {\n const endRequest = () => {\n // FIX: Node.js 10 calls the write callback AFTER the end callback!\n while (this._progressCallbacks.length !== 0) {\n this._progressCallbacks.shift()();\n }\n // We need to check if `this[kRequest]` is present,\n // because it isn't when we use cache.\n if (!(kRequest in this)) {\n callback();\n return;\n }\n if (this[kRequest].destroyed) {\n callback();\n return;\n }\n this[kRequest].end((error) => {\n if (!error) {\n this[kBodySize] = this[kUploadedSize];\n this.emit('uploadProgress', this.uploadProgress);\n this[kRequest].emit('upload-complete');\n }\n callback(error);\n });\n };\n if (this.requestInitialized) {\n endRequest();\n }\n else {\n this[kJobs].push(endRequest);\n }\n }\n _destroy(error, callback) {\n var _a;\n this[kStopReading] = true;\n // Prevent further retries\n clearTimeout(this[kRetryTimeout]);\n if (kRequest in this) {\n this[kCancelTimeouts]();\n // TODO: Remove the next `if` when these get fixed:\n // - https://github.com/nodejs/node/issues/32851\n if (!((_a = this[kResponse]) === null || _a === void 0 ? void 0 : _a.complete)) {\n this[kRequest].destroy();\n }\n }\n if (error !== null && !is_1.default.undefined(error) && !(error instanceof RequestError)) {\n error = new RequestError(error.message, error, this);\n }\n callback(error);\n }\n get _isAboutToError() {\n return this[kStopReading];\n }\n /**\n The remote IP address.\n */\n get ip() {\n var _a;\n return (_a = this.socket) === null || _a === void 0 ? void 0 : _a.remoteAddress;\n }\n /**\n Indicates whether the request has been aborted or not.\n */\n get aborted() {\n var _a, _b, _c;\n return ((_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroyed) !== null && _b !== void 0 ? _b : this.destroyed) && !((_c = this[kOriginalResponse]) === null || _c === void 0 ? void 0 : _c.complete);\n }\n get socket() {\n var _a, _b;\n return (_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.socket) !== null && _b !== void 0 ? 
_b : undefined;\n }\n /**\n Progress event for downloading (receiving a response).\n */\n get downloadProgress() {\n let percent;\n if (this[kResponseSize]) {\n percent = this[kDownloadedSize] / this[kResponseSize];\n }\n else if (this[kResponseSize] === this[kDownloadedSize]) {\n percent = 1;\n }\n else {\n percent = 0;\n }\n return {\n percent,\n transferred: this[kDownloadedSize],\n total: this[kResponseSize]\n };\n }\n /**\n Progress event for uploading (sending a request).\n */\n get uploadProgress() {\n let percent;\n if (this[kBodySize]) {\n percent = this[kUploadedSize] / this[kBodySize];\n }\n else if (this[kBodySize] === this[kUploadedSize]) {\n percent = 1;\n }\n else {\n percent = 0;\n }\n return {\n percent,\n transferred: this[kUploadedSize],\n total: this[kBodySize]\n };\n }\n /**\n The object contains the following properties:\n\n - `start` - Time when the request started.\n - `socket` - Time when a socket was assigned to the request.\n - `lookup` - Time when the DNS lookup finished.\n - `connect` - Time when the socket successfully connected.\n - `secureConnect` - Time when the socket securely connected.\n - `upload` - Time when the request finished uploading.\n - `response` - Time when the request fired `response` event.\n - `end` - Time when the response fired `end` event.\n - `error` - Time when the request fired `error` event.\n - `abort` - Time when the request fired `abort` event.\n - `phases`\n - `wait` - `timings.socket - timings.start`\n - `dns` - `timings.lookup - timings.socket`\n - `tcp` - `timings.connect - timings.lookup`\n - `tls` - `timings.secureConnect - timings.connect`\n - `request` - `timings.upload - (timings.secureConnect || timings.connect)`\n - `firstByte` - `timings.response - timings.upload`\n - `download` - `timings.end - timings.response`\n - `total` - `(timings.end || timings.error || timings.abort) - timings.start`\n\n If something has not been measured yet, it will be `undefined`.\n\n __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.\n */\n get timings() {\n var _a;\n return (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.timings;\n }\n /**\n Whether the response was retrieved from the cache.\n */\n get isFromCache() {\n return this[kIsFromCache];\n }\n pipe(destination, options) {\n if (this[kStartedReading]) {\n throw new Error('Failed to pipe. 
The response has been emitted already.');\n }\n if (destination instanceof http_1.ServerResponse) {\n this[kServerResponsesPiped].add(destination);\n }\n return super.pipe(destination, options);\n }\n unpipe(destination) {\n if (destination instanceof http_1.ServerResponse) {\n this[kServerResponsesPiped].delete(destination);\n }\n super.unpipe(destination);\n return this;\n }\n}\nexports.default = Request;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.dnsLookupIpVersionToFamily = exports.isDnsLookupIpVersion = void 0;\nconst conversionTable = {\n auto: 0,\n ipv4: 4,\n ipv6: 6\n};\nexports.isDnsLookupIpVersion = (value) => {\n return value in conversionTable;\n};\nexports.dnsLookupIpVersionToFamily = (dnsLookupIpVersion) => {\n if (exports.isDnsLookupIpVersion(dnsLookupIpVersion)) {\n return conversionTable[dnsLookupIpVersion];\n }\n throw new Error('Invalid DNS lookup IP version');\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst fs_1 = require(\"fs\");\nconst util_1 = require(\"util\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst is_form_data_1 = require(\"./is-form-data\");\nconst statAsync = util_1.promisify(fs_1.stat);\nexports.default = async (body, headers) => {\n if (headers && 'content-length' in headers) {\n return Number(headers['content-length']);\n }\n if (!body) {\n return 0;\n }\n if (is_1.default.string(body)) {\n return Buffer.byteLength(body);\n }\n if (is_1.default.buffer(body)) {\n return body.length;\n }\n if (is_form_data_1.default(body)) {\n return util_1.promisify(body.getLength.bind(body))();\n }\n if (body instanceof fs_1.ReadStream) {\n const { size } = await statAsync(body.path);\n if (size === 0) {\n return undefined;\n }\n return size;\n }\n return undefined;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// TODO: Update https://github.com/sindresorhus/get-stream\nconst getBuffer = async (stream) => {\n const chunks = [];\n let length = 0;\n for await (const chunk of stream) {\n chunks.push(chunk);\n length += Buffer.byteLength(chunk);\n }\n if (Buffer.isBuffer(chunks[0])) {\n return Buffer.concat(chunks, length);\n }\n return Buffer.from(chunks.join(''));\n};\nexports.default = getBuffer;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nexports.default = (body) => is_1.default.nodeStream(body) && is_1.default.function_(body.getBoundary);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isResponseOk = void 0;\nexports.isResponseOk = (response) => {\n const { statusCode } = response;\n const limitStatusCode = response.request.options.followRedirect ? 
299 : 399;\n return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/* istanbul ignore file: deprecated */\nconst url_1 = require(\"url\");\nconst keys = [\n 'protocol',\n 'host',\n 'hostname',\n 'port',\n 'pathname',\n 'search'\n];\nexports.default = (origin, options) => {\n var _a, _b;\n if (options.path) {\n if (options.pathname) {\n throw new TypeError('Parameters `path` and `pathname` are mutually exclusive.');\n }\n if (options.search) {\n throw new TypeError('Parameters `path` and `search` are mutually exclusive.');\n }\n if (options.searchParams) {\n throw new TypeError('Parameters `path` and `searchParams` are mutually exclusive.');\n }\n }\n if (options.search && options.searchParams) {\n throw new TypeError('Parameters `search` and `searchParams` are mutually exclusive.');\n }\n if (!origin) {\n if (!options.protocol) {\n throw new TypeError('No URL protocol specified');\n }\n origin = `${options.protocol}//${(_b = (_a = options.hostname) !== null && _a !== void 0 ? _a : options.host) !== null && _b !== void 0 ? _b : ''}`;\n }\n const url = new url_1.URL(origin);\n if (options.path) {\n const searchIndex = options.path.indexOf('?');\n if (searchIndex === -1) {\n options.pathname = options.path;\n }\n else {\n options.pathname = options.path.slice(0, searchIndex);\n options.search = options.path.slice(searchIndex + 1);\n }\n delete options.path;\n }\n for (const key of keys) {\n if (options[key]) {\n url[key] = options[key].toString();\n }\n }\n return url;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction default_1(from, to, events) {\n const fns = {};\n for (const event of events) {\n fns[event] = (...args) => {\n to.emit(event, ...args);\n };\n from.on(event, fns[event]);\n }\n return () => {\n for (const event of events) {\n from.off(event, fns[event]);\n }\n };\n}\nexports.default = default_1;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TimeoutError = void 0;\nconst net = require(\"net\");\nconst unhandle_1 = require(\"./unhandle\");\nconst reentry = Symbol('reentry');\nconst noop = () => { };\nclass TimeoutError extends Error {\n constructor(threshold, event) {\n super(`Timeout awaiting '${event}' for ${threshold}ms`);\n this.event = event;\n this.name = 'TimeoutError';\n this.code = 'ETIMEDOUT';\n }\n}\nexports.TimeoutError = TimeoutError;\nexports.default = (request, delays, options) => {\n if (reentry in request) {\n return noop;\n }\n request[reentry] = true;\n const cancelers = [];\n const { once, unhandleAll } = unhandle_1.default();\n const addTimeout = (delay, callback, event) => {\n var _a;\n const timeout = setTimeout(callback, delay, delay, event);\n (_a = timeout.unref) === null || _a === void 0 ? 
void 0 : _a.call(timeout);\n const cancel = () => {\n clearTimeout(timeout);\n };\n cancelers.push(cancel);\n return cancel;\n };\n const { host, hostname } = options;\n const timeoutHandler = (delay, event) => {\n request.destroy(new TimeoutError(delay, event));\n };\n const cancelTimeouts = () => {\n for (const cancel of cancelers) {\n cancel();\n }\n unhandleAll();\n };\n request.once('error', error => {\n cancelTimeouts();\n // Save original behavior\n /* istanbul ignore next */\n if (request.listenerCount('error') === 0) {\n throw error;\n }\n });\n request.once('close', cancelTimeouts);\n once(request, 'response', (response) => {\n once(response, 'end', cancelTimeouts);\n });\n if (typeof delays.request !== 'undefined') {\n addTimeout(delays.request, timeoutHandler, 'request');\n }\n if (typeof delays.socket !== 'undefined') {\n const socketTimeoutHandler = () => {\n timeoutHandler(delays.socket, 'socket');\n };\n request.setTimeout(delays.socket, socketTimeoutHandler);\n // `request.setTimeout(0)` causes a memory leak.\n // We can just remove the listener and forget about the timer - it's unreffed.\n // See https://github.com/sindresorhus/got/issues/690\n cancelers.push(() => {\n request.removeListener('timeout', socketTimeoutHandler);\n });\n }\n once(request, 'socket', (socket) => {\n var _a;\n const { socketPath } = request;\n /* istanbul ignore next: hard to test */\n if (socket.connecting) {\n const hasPath = Boolean(socketPath !== null && socketPath !== void 0 ? socketPath : net.isIP((_a = hostname !== null && hostname !== void 0 ? hostname : host) !== null && _a !== void 0 ? _a : '') !== 0);\n if (typeof delays.lookup !== 'undefined' && !hasPath && typeof socket.address().address === 'undefined') {\n const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');\n once(socket, 'lookup', cancelTimeout);\n }\n if (typeof delays.connect !== 'undefined') {\n const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');\n if (hasPath) {\n once(socket, 'connect', timeConnect());\n }\n else {\n once(socket, 'lookup', (error) => {\n if (error === null) {\n once(socket, 'connect', timeConnect());\n }\n });\n }\n }\n if (typeof delays.secureConnect !== 'undefined' && options.protocol === 'https:') {\n once(socket, 'connect', () => {\n const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');\n once(socket, 'secureConnect', cancelTimeout);\n });\n }\n }\n if (typeof delays.send !== 'undefined') {\n const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');\n /* istanbul ignore next: hard to test */\n if (socket.connecting) {\n once(socket, 'connect', () => {\n once(request, 'upload-complete', timeRequest());\n });\n }\n else {\n once(request, 'upload-complete', timeRequest());\n }\n }\n });\n if (typeof delays.response !== 'undefined') {\n once(request, 'upload-complete', () => {\n const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');\n once(request, 'response', cancelTimeout);\n });\n }\n return cancelTimeouts;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// When attaching listeners, it's very easy to forget about them.\n// Especially if you do error handling and set timeouts.\n// So instead of checking if it's proper to throw an error on every timeout ever,\n// use this simple tool which will remove all listeners you have attached.\nexports.default = () => {\n const handlers = [];\n return {\n once(origin, event, fn) {\n 
origin.once(event, fn);\n handlers.push({ origin, event, fn });\n },\n unhandleAll() {\n for (const handler of handlers) {\n const { origin, event, fn } = handler;\n origin.removeListener(event, fn);\n }\n handlers.length = 0;\n }\n };\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nexports.default = (url) => {\n // Cast to URL\n url = url;\n const options = {\n protocol: url.protocol,\n hostname: is_1.default.string(url.hostname) && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,\n host: url.host,\n hash: url.hash,\n search: url.search,\n pathname: url.pathname,\n href: url.href,\n path: `${url.pathname || ''}${url.search || ''}`\n };\n if (is_1.default.string(url.port) && url.port.length > 0) {\n options.port = Number(url.port);\n }\n if (url.username || url.password) {\n options.auth = `${url.username || ''}:${url.password || ''}`;\n }\n return options;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass WeakableMap {\n constructor() {\n this.weakMap = new WeakMap();\n this.map = new Map();\n }\n set(key, value) {\n if (typeof key === 'object') {\n this.weakMap.set(key, value);\n }\n else {\n this.map.set(key, value);\n }\n }\n get(key) {\n if (typeof key === 'object') {\n return this.weakMap.get(key);\n }\n return this.map.get(key);\n }\n has(key) {\n if (typeof key === 'object') {\n return this.weakMap.has(key);\n }\n return this.map.has(key);\n }\n}\nexports.default = WeakableMap;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultHandler = void 0;\nconst is_1 = require(\"@sindresorhus/is\");\nconst as_promise_1 = require(\"./as-promise\");\nconst create_rejection_1 = require(\"./as-promise/create-rejection\");\nconst core_1 = require(\"./core\");\nconst deep_freeze_1 = require(\"./utils/deep-freeze\");\nconst errors = {\n RequestError: as_promise_1.RequestError,\n CacheError: as_promise_1.CacheError,\n ReadError: as_promise_1.ReadError,\n HTTPError: as_promise_1.HTTPError,\n MaxRedirectsError: as_promise_1.MaxRedirectsError,\n TimeoutError: as_promise_1.TimeoutError,\n ParseError: as_promise_1.ParseError,\n CancelError: as_promise_1.CancelError,\n UnsupportedProtocolError: as_promise_1.UnsupportedProtocolError,\n UploadError: as_promise_1.UploadError\n};\n// The `delay` package weighs 10KB (!)\nconst delay = async (ms) => new Promise(resolve => {\n setTimeout(resolve, ms);\n});\nconst { normalizeArguments } = core_1.default;\nconst mergeOptions = (...sources) => {\n let mergedOptions;\n for (const source of sources) {\n mergedOptions = normalizeArguments(undefined, source, mergedOptions);\n }\n return mergedOptions;\n};\nconst getPromiseOrStream = (options) => options.isStream ? 
new core_1.default(undefined, options) : as_promise_1.default(options);\nconst isGotInstance = (value) => ('defaults' in value && 'options' in value.defaults);\nconst aliases = [\n 'get',\n 'post',\n 'put',\n 'patch',\n 'head',\n 'delete'\n];\nexports.defaultHandler = (options, next) => next(options);\nconst callInitHooks = (hooks, options) => {\n if (hooks) {\n for (const hook of hooks) {\n hook(options);\n }\n }\n};\nconst create = (defaults) => {\n // Proxy properties from next handlers\n defaults._rawHandlers = defaults.handlers;\n defaults.handlers = defaults.handlers.map(fn => ((options, next) => {\n // This will be assigned by assigning result\n let root;\n const result = fn(options, newOptions => {\n root = next(newOptions);\n return root;\n });\n if (result !== root && !options.isStream && root) {\n const typedResult = result;\n const { then: promiseThen, catch: promiseCatch, finally: promiseFianlly } = typedResult;\n Object.setPrototypeOf(typedResult, Object.getPrototypeOf(root));\n Object.defineProperties(typedResult, Object.getOwnPropertyDescriptors(root));\n // These should point to the new promise\n // eslint-disable-next-line promise/prefer-await-to-then\n typedResult.then = promiseThen;\n typedResult.catch = promiseCatch;\n typedResult.finally = promiseFianlly;\n }\n return result;\n }));\n // Got interface\n const got = ((url, options = {}, _defaults) => {\n var _a, _b;\n let iteration = 0;\n const iterateHandlers = (newOptions) => {\n return defaults.handlers[iteration++](newOptions, iteration === defaults.handlers.length ? getPromiseOrStream : iterateHandlers);\n };\n // TODO: Remove this in Got 12.\n if (is_1.default.plainObject(url)) {\n const mergedOptions = {\n ...url,\n ...options\n };\n core_1.setNonEnumerableProperties([url, options], mergedOptions);\n options = mergedOptions;\n url = undefined;\n }\n try {\n // Call `init` hooks\n let initHookError;\n try {\n callInitHooks(defaults.options.hooks.init, options);\n callInitHooks((_a = options.hooks) === null || _a === void 0 ? void 0 : _a.init, options);\n }\n catch (error) {\n initHookError = error;\n }\n // Normalize options & call handlers\n const normalizedOptions = normalizeArguments(url, options, _defaults !== null && _defaults !== void 0 ? _defaults : defaults.options);\n normalizedOptions[core_1.kIsNormalizedAlready] = true;\n if (initHookError) {\n throw new as_promise_1.RequestError(initHookError.message, initHookError, normalizedOptions);\n }\n return iterateHandlers(normalizedOptions);\n }\n catch (error) {\n if (options.isStream) {\n throw error;\n }\n else {\n return create_rejection_1.default(error, defaults.options.hooks.beforeError, (_b = options.hooks) === null || _b === void 0 ? 
void 0 : _b.beforeError);\n }\n }\n });\n got.extend = (...instancesOrOptions) => {\n const optionsArray = [defaults.options];\n let handlers = [...defaults._rawHandlers];\n let isMutableDefaults;\n for (const value of instancesOrOptions) {\n if (isGotInstance(value)) {\n optionsArray.push(value.defaults.options);\n handlers.push(...value.defaults._rawHandlers);\n isMutableDefaults = value.defaults.mutableDefaults;\n }\n else {\n optionsArray.push(value);\n if ('handlers' in value) {\n handlers.push(...value.handlers);\n }\n isMutableDefaults = value.mutableDefaults;\n }\n }\n handlers = handlers.filter(handler => handler !== exports.defaultHandler);\n if (handlers.length === 0) {\n handlers.push(exports.defaultHandler);\n }\n return create({\n options: mergeOptions(...optionsArray),\n handlers,\n mutableDefaults: Boolean(isMutableDefaults)\n });\n };\n // Pagination\n const paginateEach = (async function* (url, options) {\n // TODO: Remove this `@ts-expect-error` when upgrading to TypeScript 4.\n // Error: Argument of type 'Merge> | undefined' is not assignable to parameter of type 'Options | undefined'.\n // @ts-expect-error\n let normalizedOptions = normalizeArguments(url, options, defaults.options);\n normalizedOptions.resolveBodyOnly = false;\n const pagination = normalizedOptions.pagination;\n if (!is_1.default.object(pagination)) {\n throw new TypeError('`options.pagination` must be implemented');\n }\n const all = [];\n let { countLimit } = pagination;\n let numberOfRequests = 0;\n while (numberOfRequests < pagination.requestLimit) {\n if (numberOfRequests !== 0) {\n // eslint-disable-next-line no-await-in-loop\n await delay(pagination.backoff);\n }\n // @ts-expect-error FIXME!\n // TODO: Throw when result is not an instance of Response\n // eslint-disable-next-line no-await-in-loop\n const result = (await got(undefined, undefined, normalizedOptions));\n // eslint-disable-next-line no-await-in-loop\n const parsed = await pagination.transform(result);\n const current = [];\n for (const item of parsed) {\n if (pagination.filter(item, all, current)) {\n if (!pagination.shouldContinue(item, all, current)) {\n return;\n }\n yield item;\n if (pagination.stackAllItems) {\n all.push(item);\n }\n current.push(item);\n if (--countLimit <= 0) {\n return;\n }\n }\n }\n const optionsToMerge = pagination.paginate(result, all, current);\n if (optionsToMerge === false) {\n return;\n }\n if (optionsToMerge === result.request.options) {\n normalizedOptions = result.request.options;\n }\n else if (optionsToMerge !== undefined) {\n normalizedOptions = normalizeArguments(undefined, optionsToMerge, normalizedOptions);\n }\n numberOfRequests++;\n }\n });\n got.paginate = paginateEach;\n got.paginate.all = (async (url, options) => {\n const results = [];\n for await (const item of paginateEach(url, options)) {\n results.push(item);\n }\n return results;\n });\n // For those who like very descriptive names\n got.paginate.each = paginateEach;\n // Stream API\n got.stream = ((url, options) => got(url, { ...options, isStream: true }));\n // Shortcuts\n for (const method of aliases) {\n got[method] = ((url, options) => got(url, { ...options, method }));\n got.stream[method] = ((url, options) => {\n return got(url, { ...options, method, isStream: true });\n });\n }\n Object.assign(got, errors);\n Object.defineProperty(got, 'defaults', {\n value: defaults.mutableDefaults ? 
defaults : deep_freeze_1.default(defaults),\n writable: defaults.mutableDefaults,\n configurable: defaults.mutableDefaults,\n enumerable: true\n });\n got.mergeOptions = mergeOptions;\n return got;\n};\nexports.default = create;\n__exportStar(require(\"./types\"), exports);\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst url_1 = require(\"url\");\nconst create_1 = require(\"./create\");\nconst defaults = {\n options: {\n method: 'GET',\n retry: {\n limit: 2,\n methods: [\n 'GET',\n 'PUT',\n 'HEAD',\n 'DELETE',\n 'OPTIONS',\n 'TRACE'\n ],\n statusCodes: [\n 408,\n 413,\n 429,\n 500,\n 502,\n 503,\n 504,\n 521,\n 522,\n 524\n ],\n errorCodes: [\n 'ETIMEDOUT',\n 'ECONNRESET',\n 'EADDRINUSE',\n 'ECONNREFUSED',\n 'EPIPE',\n 'ENOTFOUND',\n 'ENETUNREACH',\n 'EAI_AGAIN'\n ],\n maxRetryAfter: undefined,\n calculateDelay: ({ computedValue }) => computedValue\n },\n timeout: {},\n headers: {\n 'user-agent': 'got (https://github.com/sindresorhus/got)'\n },\n hooks: {\n init: [],\n beforeRequest: [],\n beforeRedirect: [],\n beforeRetry: [],\n beforeError: [],\n afterResponse: []\n },\n cache: undefined,\n dnsCache: undefined,\n decompress: true,\n throwHttpErrors: true,\n followRedirect: true,\n isStream: false,\n responseType: 'text',\n resolveBodyOnly: false,\n maxRedirects: 10,\n prefixUrl: '',\n methodRewriting: true,\n ignoreInvalidCookies: false,\n context: {},\n // TODO: Set this to `true` when Got 12 gets released\n http2: false,\n allowGetBody: false,\n https: undefined,\n pagination: {\n transform: (response) => {\n if (response.request.options.responseType === 'json') {\n return response.body;\n }\n return JSON.parse(response.body);\n },\n paginate: response => {\n if (!Reflect.has(response.headers, 'link')) {\n return false;\n }\n const items = response.headers.link.split(',');\n let next;\n for (const item of items) {\n const parsed = item.split(';');\n if (parsed[1].includes('next')) {\n next = parsed[0].trimStart().trim();\n next = next.slice(1, -1);\n break;\n }\n }\n if (next) {\n const options = {\n url: new url_1.URL(next)\n };\n return options;\n }\n return false;\n },\n filter: () => true,\n shouldContinue: () => true,\n countLimit: Infinity,\n backoff: 0,\n requestLimit: 10000,\n stackAllItems: true\n },\n parseJson: (text) => JSON.parse(text),\n stringifyJson: (object) => JSON.stringify(object),\n cacheOptions: {}\n },\n handlers: [create_1.defaultHandler],\n mutableDefaults: false\n};\nconst got = create_1.default(defaults);\nexports.default = got;\n// For CommonJS default export support\nmodule.exports = got;\nmodule.exports.default = got;\nmodule.exports.__esModule = true; // Workaround for TS issue: https://github.com/sindresorhus/got/pull/1267\n__exportStar(require(\"./create\"), exports);\n__exportStar(require(\"./as-promise\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = 
require(\"@sindresorhus/is\");\nfunction deepFreeze(object) {\n for (const value of Object.values(object)) {\n if (is_1.default.plainObject(value) || is_1.default.array(value)) {\n deepFreeze(value);\n }\n }\n return Object.freeze(object);\n}\nexports.default = deepFreeze;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst alreadyWarned = new Set();\nexports.default = (message) => {\n if (alreadyWarned.has(message)) {\n return;\n }\n alreadyWarned.add(message);\n // @ts-expect-error Missing types.\n process.emitWarning(`Got: ${message}`, {\n type: 'DeprecationWarning'\n });\n};\n","'use strict';\n// rfc7231 6.1\nconst statusCodeCacheableByDefault = new Set([\n 200,\n 203,\n 204,\n 206,\n 300,\n 301,\n 404,\n 405,\n 410,\n 414,\n 501,\n]);\n\n// This implementation does not understand partial responses (206)\nconst understoodStatuses = new Set([\n 200,\n 203,\n 204,\n 300,\n 301,\n 302,\n 303,\n 307,\n 308,\n 404,\n 405,\n 410,\n 414,\n 501,\n]);\n\nconst errorStatusCodes = new Set([\n 500,\n 502,\n 503, \n 504,\n]);\n\nconst hopByHopHeaders = {\n date: true, // included, because we add Age update Date\n connection: true,\n 'keep-alive': true,\n 'proxy-authenticate': true,\n 'proxy-authorization': true,\n te: true,\n trailer: true,\n 'transfer-encoding': true,\n upgrade: true,\n};\n\nconst excludedFromRevalidationUpdate = {\n // Since the old body is reused, it doesn't make sense to change properties of the body\n 'content-length': true,\n 'content-encoding': true,\n 'transfer-encoding': true,\n 'content-range': true,\n};\n\nfunction toNumberOrZero(s) {\n const n = parseInt(s, 10);\n return isFinite(n) ? n : 0;\n}\n\n// RFC 5861\nfunction isErrorResponse(response) {\n // consider undefined response as faulty\n if(!response) {\n return true\n }\n return errorStatusCodes.has(response.status);\n}\n\nfunction parseCacheControl(header) {\n const cc = {};\n if (!header) return cc;\n\n // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),\n // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale\n const parts = header.trim().split(/\\s*,\\s*/); // TODO: lame parsing\n for (const part of parts) {\n const [k, v] = part.split(/\\s*=\\s*/, 2);\n cc[k] = v === undefined ? true : v.replace(/^\"|\"$/g, ''); // TODO: lame unquoting\n }\n\n return cc;\n}\n\nfunction formatCacheControl(cc) {\n let parts = [];\n for (const k in cc) {\n const v = cc[k];\n parts.push(v === true ? k : k + '=' + v);\n }\n if (!parts.length) {\n return undefined;\n }\n return parts.join(', ');\n}\n\nmodule.exports = class CachePolicy {\n constructor(\n req,\n res,\n {\n shared,\n cacheHeuristic,\n immutableMinTimeToLive,\n ignoreCargoCult,\n _fromObject,\n } = {}\n ) {\n if (_fromObject) {\n this._fromObject(_fromObject);\n return;\n }\n\n if (!res || !res.headers) {\n throw Error('Response headers missing');\n }\n this._assertRequestHasHeaders(req);\n\n this._responseTime = this.now();\n this._isShared = shared !== false;\n this._cacheHeuristic =\n undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE\n this._immutableMinTtl =\n undefined !== immutableMinTimeToLive\n ? immutableMinTimeToLive\n : 24 * 3600 * 1000;\n\n this._status = 'status' in res ? 
res.status : 200;\n this._resHeaders = res.headers;\n this._rescc = parseCacheControl(res.headers['cache-control']);\n this._method = 'method' in req ? req.method : 'GET';\n this._url = req.url;\n this._host = req.headers.host;\n this._noAuthorization = !req.headers.authorization;\n this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used\n this._reqcc = parseCacheControl(req.headers['cache-control']);\n\n // Assume that if someone uses legacy, non-standard uncecessary options they don't understand caching,\n // so there's no point stricly adhering to the blindly copy&pasted directives.\n if (\n ignoreCargoCult &&\n 'pre-check' in this._rescc &&\n 'post-check' in this._rescc\n ) {\n delete this._rescc['pre-check'];\n delete this._rescc['post-check'];\n delete this._rescc['no-cache'];\n delete this._rescc['no-store'];\n delete this._rescc['must-revalidate'];\n this._resHeaders = Object.assign({}, this._resHeaders, {\n 'cache-control': formatCacheControl(this._rescc),\n });\n delete this._resHeaders.expires;\n delete this._resHeaders.pragma;\n }\n\n // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive\n // as having the same effect as if \"Cache-Control: no-cache\" were present (see Section 5.2.1).\n if (\n res.headers['cache-control'] == null &&\n /no-cache/.test(res.headers.pragma)\n ) {\n this._rescc['no-cache'] = true;\n }\n }\n\n now() {\n return Date.now();\n }\n\n storable() {\n // The \"no-store\" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.\n return !!(\n !this._reqcc['no-store'] &&\n // A cache MUST NOT store a response to any request, unless:\n // The request method is understood by the cache and defined as being cacheable, and\n ('GET' === this._method ||\n 'HEAD' === this._method ||\n ('POST' === this._method && this._hasExplicitExpiration())) &&\n // the response status code is understood by the cache, and\n understoodStatuses.has(this._status) &&\n // the \"no-store\" cache directive does not appear in request or response header fields, and\n !this._rescc['no-store'] &&\n // the \"private\" response directive does not appear in the response, if the cache is shared, and\n (!this._isShared || !this._rescc.private) &&\n // the Authorization header field does not appear in the request, if the cache is shared,\n (!this._isShared ||\n this._noAuthorization ||\n this._allowsStoringAuthenticated()) &&\n // the response either:\n // contains an Expires header field, or\n (this._resHeaders.expires ||\n // contains a max-age response directive, or\n // contains a s-maxage response directive and the cache is shared, or\n // contains a public response directive.\n this._rescc['max-age'] ||\n (this._isShared && this._rescc['s-maxage']) ||\n this._rescc.public ||\n // has a status code that is defined as cacheable by default\n statusCodeCacheableByDefault.has(this._status))\n );\n }\n\n _hasExplicitExpiration() {\n // 4.2.1 Calculating Freshness Lifetime\n return (\n (this._isShared && this._rescc['s-maxage']) ||\n this._rescc['max-age'] ||\n this._resHeaders.expires\n );\n }\n\n _assertRequestHasHeaders(req) {\n if (!req || !req.headers) {\n throw Error('Request headers missing');\n }\n }\n\n satisfiesWithoutRevalidation(req) {\n this._assertRequestHasHeaders(req);\n\n // When presented with a request, a cache MUST NOT reuse a stored response, unless:\n // the presented request does not contain 
the no-cache pragma (Section 5.4), nor the no-cache cache directive,\n // unless the stored response is successfully validated (Section 4.3), and\n const requestCC = parseCacheControl(req.headers['cache-control']);\n if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {\n return false;\n }\n\n if (requestCC['max-age'] && this.age() > requestCC['max-age']) {\n return false;\n }\n\n if (\n requestCC['min-fresh'] &&\n this.timeToLive() < 1000 * requestCC['min-fresh']\n ) {\n return false;\n }\n\n // the stored response is either:\n // fresh, or allowed to be served stale\n if (this.stale()) {\n const allowsStale =\n requestCC['max-stale'] &&\n !this._rescc['must-revalidate'] &&\n (true === requestCC['max-stale'] ||\n requestCC['max-stale'] > this.age() - this.maxAge());\n if (!allowsStale) {\n return false;\n }\n }\n\n return this._requestMatches(req, false);\n }\n\n _requestMatches(req, allowHeadMethod) {\n // The presented effective request URI and that of the stored response match, and\n return (\n (!this._url || this._url === req.url) &&\n this._host === req.headers.host &&\n // the request method associated with the stored response allows it to be used for the presented request, and\n (!req.method ||\n this._method === req.method ||\n (allowHeadMethod && 'HEAD' === req.method)) &&\n // selecting header fields nominated by the stored response (if any) match those presented, and\n this._varyMatches(req)\n );\n }\n\n _allowsStoringAuthenticated() {\n // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.\n return (\n this._rescc['must-revalidate'] ||\n this._rescc.public ||\n this._rescc['s-maxage']\n );\n }\n\n _varyMatches(req) {\n if (!this._resHeaders.vary) {\n return true;\n }\n\n // A Vary header field-value of \"*\" always fails to match\n if (this._resHeaders.vary === '*') {\n return false;\n }\n\n const fields = this._resHeaders.vary\n .trim()\n .toLowerCase()\n .split(/\\s*,\\s*/);\n for (const name of fields) {\n if (req.headers[name] !== this._reqHeaders[name]) return false;\n }\n return true;\n }\n\n _copyWithoutHopByHopHeaders(inHeaders) {\n const headers = {};\n for (const name in inHeaders) {\n if (hopByHopHeaders[name]) continue;\n headers[name] = inHeaders[name];\n }\n // 9.1. Connection\n if (inHeaders.connection) {\n const tokens = inHeaders.connection.trim().split(/\\s*,\\s*/);\n for (const name of tokens) {\n delete headers[name];\n }\n }\n if (headers.warning) {\n const warnings = headers.warning.split(/,/).filter(warning => {\n return !/^\\s*1[0-9][0-9]/.test(warning);\n });\n if (!warnings.length) {\n delete headers.warning;\n } else {\n headers.warning = warnings.join(',').trim();\n }\n }\n return headers;\n }\n\n responseHeaders() {\n const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);\n const age = this.age();\n\n // A cache SHOULD generate 113 warning if it heuristically chose a freshness\n // lifetime greater than 24 hours and the response's age is greater than 24 hours.\n if (\n age > 3600 * 24 &&\n !this._hasExplicitExpiration() &&\n this.maxAge() > 3600 * 24\n ) {\n headers.warning =\n (headers.warning ? 
`${headers.warning}, ` : '') +\n '113 - \"rfc7234 5.5.4\"';\n }\n headers.age = `${Math.round(age)}`;\n headers.date = new Date(this.now()).toUTCString();\n return headers;\n }\n\n /**\n * Value of the Date response header or current time if Date was invalid\n * @return timestamp\n */\n date() {\n const serverDate = Date.parse(this._resHeaders.date);\n if (isFinite(serverDate)) {\n return serverDate;\n }\n return this._responseTime;\n }\n\n /**\n * Value of the Age header, in seconds, updated for the current time.\n * May be fractional.\n *\n * @return Number\n */\n age() {\n let age = this._ageValue();\n\n const residentTime = (this.now() - this._responseTime) / 1000;\n return age + residentTime;\n }\n\n _ageValue() {\n return toNumberOrZero(this._resHeaders.age);\n }\n\n /**\n * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.\n *\n * For an up-to-date value, see `timeToLive()`.\n *\n * @return Number\n */\n maxAge() {\n if (!this.storable() || this._rescc['no-cache']) {\n return 0;\n }\n\n // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default\n // so this implementation requires explicit opt-in via public header\n if (\n this._isShared &&\n (this._resHeaders['set-cookie'] &&\n !this._rescc.public &&\n !this._rescc.immutable)\n ) {\n return 0;\n }\n\n if (this._resHeaders.vary === '*') {\n return 0;\n }\n\n if (this._isShared) {\n if (this._rescc['proxy-revalidate']) {\n return 0;\n }\n // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.\n if (this._rescc['s-maxage']) {\n return toNumberOrZero(this._rescc['s-maxage']);\n }\n }\n\n // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.\n if (this._rescc['max-age']) {\n return toNumberOrZero(this._rescc['max-age']);\n }\n\n const defaultMinTtl = this._rescc.immutable ? 
this._immutableMinTtl : 0;\n\n const serverDate = this.date();\n if (this._resHeaders.expires) {\n const expires = Date.parse(this._resHeaders.expires);\n // A cache recipient MUST interpret invalid date formats, especially the value \"0\", as representing a time in the past (i.e., \"already expired\").\n if (Number.isNaN(expires) || expires < serverDate) {\n return 0;\n }\n return Math.max(defaultMinTtl, (expires - serverDate) / 1000);\n }\n\n if (this._resHeaders['last-modified']) {\n const lastModified = Date.parse(this._resHeaders['last-modified']);\n if (isFinite(lastModified) && serverDate > lastModified) {\n return Math.max(\n defaultMinTtl,\n ((serverDate - lastModified) / 1000) * this._cacheHeuristic\n );\n }\n }\n\n return defaultMinTtl;\n }\n\n timeToLive() {\n const age = this.maxAge() - this.age();\n const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);\n const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);\n return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;\n }\n\n stale() {\n return this.maxAge() <= this.age();\n }\n\n _useStaleIfError() {\n return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();\n }\n\n useStaleWhileRevalidate() {\n return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();\n }\n\n static fromObject(obj) {\n return new this(undefined, undefined, { _fromObject: obj });\n }\n\n _fromObject(obj) {\n if (this._responseTime) throw Error('Reinitialized');\n if (!obj || obj.v !== 1) throw Error('Invalid serialization');\n\n this._responseTime = obj.t;\n this._isShared = obj.sh;\n this._cacheHeuristic = obj.ch;\n this._immutableMinTtl =\n obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;\n this._status = obj.st;\n this._resHeaders = obj.resh;\n this._rescc = obj.rescc;\n this._method = obj.m;\n this._url = obj.u;\n this._host = obj.h;\n this._noAuthorization = obj.a;\n this._reqHeaders = obj.reqh;\n this._reqcc = obj.reqcc;\n }\n\n toObject() {\n return {\n v: 1,\n t: this._responseTime,\n sh: this._isShared,\n ch: this._cacheHeuristic,\n imm: this._immutableMinTtl,\n st: this._status,\n resh: this._resHeaders,\n rescc: this._rescc,\n m: this._method,\n u: this._url,\n h: this._host,\n a: this._noAuthorization,\n reqh: this._reqHeaders,\n reqcc: this._reqcc,\n };\n }\n\n /**\n * Headers for sending to the origin server to revalidate stale response.\n * Allows server to return 304 to allow reuse of the previous response.\n *\n * Hop by hop headers are always stripped.\n * Revalidation headers may be added or removed, depending on request.\n */\n revalidationHeaders(incomingReq) {\n this._assertRequestHasHeaders(incomingReq);\n const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);\n\n // This implementation does not understand range requests\n delete headers['if-range'];\n\n if (!this._requestMatches(incomingReq, true) || !this.storable()) {\n // revalidation allowed via HEAD\n // not for the same resource, or wasn't allowed to be cached anyway\n delete headers['if-none-match'];\n delete headers['if-modified-since'];\n return headers;\n }\n\n /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */\n if (this._resHeaders.etag) {\n headers['if-none-match'] = headers['if-none-match']\n ? 
`${headers['if-none-match']}, ${this._resHeaders.etag}`\n : this._resHeaders.etag;\n }\n\n // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.\n const forbidsWeakValidators =\n headers['accept-ranges'] ||\n headers['if-match'] ||\n headers['if-unmodified-since'] ||\n (this._method && this._method != 'GET');\n\n /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.\n Note: This implementation does not understand partial responses (206) */\n if (forbidsWeakValidators) {\n delete headers['if-modified-since'];\n\n if (headers['if-none-match']) {\n const etags = headers['if-none-match']\n .split(/,/)\n .filter(etag => {\n return !/^\\s*W\\//.test(etag);\n });\n if (!etags.length) {\n delete headers['if-none-match'];\n } else {\n headers['if-none-match'] = etags.join(',').trim();\n }\n }\n } else if (\n this._resHeaders['last-modified'] &&\n !headers['if-modified-since']\n ) {\n headers['if-modified-since'] = this._resHeaders['last-modified'];\n }\n\n return headers;\n }\n\n /**\n * Creates new CachePolicy with information combined from the previews response,\n * and the new revalidation response.\n *\n * Returns {policy, modified} where modified is a boolean indicating\n * whether the response body has been modified, and old cached body can't be used.\n *\n * @return {Object} {policy: CachePolicy, modified: Boolean}\n */\n revalidatedPolicy(request, response) {\n this._assertRequestHasHeaders(request);\n if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful\n return {\n modified: false,\n matches: false,\n policy: this,\n };\n }\n if (!response || !response.headers) {\n throw Error('Response headers missing');\n }\n\n // These aren't going to be supported exactly, since one CachePolicy object\n // doesn't know about all the other cached objects.\n let matches = false;\n if (response.status !== undefined && response.status != 304) {\n matches = false;\n } else if (\n response.headers.etag &&\n !/^\\s*W\\//.test(response.headers.etag)\n ) {\n // \"All of the stored responses with the same strong validator are selected.\n // If none of the stored responses contain the same strong validator,\n // then the cache MUST NOT use the new response to update any stored responses.\"\n matches =\n this._resHeaders.etag &&\n this._resHeaders.etag.replace(/^\\s*W\\//, '') ===\n response.headers.etag;\n } else if (this._resHeaders.etag && response.headers.etag) {\n // \"If the new response contains a weak validator and that validator corresponds\n // to one of the cache's stored responses,\n // then the most recent of those matching stored responses is selected for update.\"\n matches =\n this._resHeaders.etag.replace(/^\\s*W\\//, '') ===\n response.headers.etag.replace(/^\\s*W\\//, '');\n } else if (this._resHeaders['last-modified']) {\n matches =\n this._resHeaders['last-modified'] ===\n response.headers['last-modified'];\n } else {\n // If the new response does not include any form of validator (such as in the case where\n // a client generates an If-Modified-Since request from a source other than the Last-Modified\n // response header field), and there is only one stored response, and that stored response also\n // lacks a validator, then that stored response is selected for update.\n if (\n !this._resHeaders.etag 
&&\n !this._resHeaders['last-modified'] &&\n !response.headers.etag &&\n !response.headers['last-modified']\n ) {\n matches = true;\n }\n }\n\n if (!matches) {\n return {\n policy: new this.constructor(request, response),\n // Client receiving 304 without body, even if it's invalid/mismatched has no option\n // but to reuse a cached body. We don't have a good way to tell clients to do\n // error recovery in such case.\n modified: response.status != 304,\n matches: false,\n };\n }\n\n // use other header fields provided in the 304 (Not Modified) response to replace all instances\n // of the corresponding header fields in the stored response.\n const headers = {};\n for (const k in this._resHeaders) {\n headers[k] =\n k in response.headers && !excludedFromRevalidationUpdate[k]\n ? response.headers[k]\n : this._resHeaders[k];\n }\n\n const newResponse = Object.assign({}, response, {\n status: this._status,\n method: this._method,\n headers,\n });\n return {\n policy: new this.constructor(request, newResponse, {\n shared: this._isShared,\n cacheHeuristic: this._cacheHeuristic,\n immutableMinTimeToLive: this._immutableMinTtl,\n }),\n modified: false,\n matches: true,\n };\n }\n};\n","'use strict';\nconst EventEmitter = require('events');\nconst tls = require('tls');\nconst http2 = require('http2');\nconst QuickLRU = require('quick-lru');\n\nconst kCurrentStreamsCount = Symbol('currentStreamsCount');\nconst kRequest = Symbol('request');\nconst kOriginSet = Symbol('cachedOriginSet');\nconst kGracefullyClosing = Symbol('gracefullyClosing');\n\nconst nameKeys = [\n\t// `http2.connect()` options\n\t'maxDeflateDynamicTableSize',\n\t'maxSessionMemory',\n\t'maxHeaderListPairs',\n\t'maxOutstandingPings',\n\t'maxReservedRemoteStreams',\n\t'maxSendHeaderBlockLength',\n\t'paddingStrategy',\n\n\t// `tls.connect()` options\n\t'localAddress',\n\t'path',\n\t'rejectUnauthorized',\n\t'minDHSize',\n\n\t// `tls.createSecureContext()` options\n\t'ca',\n\t'cert',\n\t'clientCertEngine',\n\t'ciphers',\n\t'key',\n\t'pfx',\n\t'servername',\n\t'minVersion',\n\t'maxVersion',\n\t'secureProtocol',\n\t'crl',\n\t'honorCipherOrder',\n\t'ecdhCurve',\n\t'dhparam',\n\t'secureOptions',\n\t'sessionIdContext'\n];\n\nconst getSortedIndex = (array, value, compare) => {\n\tlet low = 0;\n\tlet high = array.length;\n\n\twhile (low < high) {\n\t\tconst mid = (low + high) >>> 1;\n\n\t\t/* istanbul ignore next */\n\t\tif (compare(array[mid], value)) {\n\t\t\t// This never gets called because we use descending sort. 
Better to have this anyway.\n\t\t\tlow = mid + 1;\n\t\t} else {\n\t\t\thigh = mid;\n\t\t}\n\t}\n\n\treturn low;\n};\n\nconst compareSessions = (a, b) => {\n\treturn a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams;\n};\n\n// See https://tools.ietf.org/html/rfc8336\nconst closeCoveredSessions = (where, session) => {\n\t// Clients SHOULD NOT emit new requests on any connection whose Origin\n\t// Set is a proper subset of another connection's Origin Set, and they\n\t// SHOULD close it once all outstanding requests are satisfied.\n\tfor (const coveredSession of where) {\n\t\tif (\n\t\t\t// The set is a proper subset when its length is less than the other set.\n\t\t\tcoveredSession[kOriginSet].length < session[kOriginSet].length &&\n\n\t\t\t// And the other set includes all elements of the subset.\n\t\t\tcoveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&\n\n\t\t\t// Makes sure that the session can handle all requests from the covered session.\n\t\t\tcoveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams\n\t\t) {\n\t\t\t// This allows pending requests to finish and prevents making new requests.\n\t\t\tgracefullyClose(coveredSession);\n\t\t}\n\t}\n};\n\n// This is basically inverted `closeCoveredSessions(...)`.\nconst closeSessionIfCovered = (where, coveredSession) => {\n\tfor (const session of where) {\n\t\tif (\n\t\t\tcoveredSession[kOriginSet].length < session[kOriginSet].length &&\n\t\t\tcoveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&\n\t\t\tcoveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams\n\t\t) {\n\t\t\tgracefullyClose(coveredSession);\n\t\t}\n\t}\n};\n\nconst getSessions = ({agent, isFree}) => {\n\tconst result = {};\n\n\t// eslint-disable-next-line guard-for-in\n\tfor (const normalizedOptions in agent.sessions) {\n\t\tconst sessions = agent.sessions[normalizedOptions];\n\n\t\tconst filtered = sessions.filter(session => {\n\t\t\tconst result = session[Agent.kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;\n\n\t\t\treturn isFree ? result : !result;\n\t\t});\n\n\t\tif (filtered.length !== 0) {\n\t\t\tresult[normalizedOptions] = filtered;\n\t\t}\n\t}\n\n\treturn result;\n};\n\nconst gracefullyClose = session => {\n\tsession[kGracefullyClosing] = true;\n\n\tif (session[kCurrentStreamsCount] === 0) {\n\t\tsession.close();\n\t}\n};\n\nclass Agent extends EventEmitter {\n\tconstructor({timeout = 60000, maxSessions = Infinity, maxFreeSessions = 10, maxCachedTlsSessions = 100} = {}) {\n\t\tsuper();\n\n\t\t// A session is considered busy when its current streams count\n\t\t// is equal to or greater than the `maxConcurrentStreams` value.\n\n\t\t// A session is considered free when its current streams count\n\t\t// is less than the `maxConcurrentStreams` value.\n\n\t\t// SESSIONS[NORMALIZED_OPTIONS] = [];\n\t\tthis.sessions = {};\n\n\t\t// The queue for creating new sessions. It looks like this:\n\t\t// QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION\n\t\t//\n\t\t// The entry function has `listeners`, `completed` and `destroyed` properties.\n\t\t// `listeners` is an array of objects containing `resolve` and `reject` functions.\n\t\t// `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed.\n\t\t// `destroyed` is a boolean. 
If it's set to true, the session will be destroyed if hasn't connected yet.\n\t\tthis.queue = {};\n\n\t\t// Each session will use this timeout value.\n\t\tthis.timeout = timeout;\n\n\t\t// Max sessions in total\n\t\tthis.maxSessions = maxSessions;\n\n\t\t// Max free sessions in total\n\t\t// TODO: decreasing `maxFreeSessions` should close some sessions\n\t\tthis.maxFreeSessions = maxFreeSessions;\n\n\t\tthis._freeSessionsCount = 0;\n\t\tthis._sessionsCount = 0;\n\n\t\t// We don't support push streams by default.\n\t\tthis.settings = {\n\t\t\tenablePush: false\n\t\t};\n\n\t\t// Reusing TLS sessions increases performance.\n\t\tthis.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions});\n\t}\n\n\tstatic normalizeOrigin(url, servername) {\n\t\tif (typeof url === 'string') {\n\t\t\turl = new URL(url);\n\t\t}\n\n\t\tif (servername && url.hostname !== servername) {\n\t\t\turl.hostname = servername;\n\t\t}\n\n\t\treturn url.origin;\n\t}\n\n\tnormalizeOptions(options) {\n\t\tlet normalized = '';\n\n\t\tif (options) {\n\t\t\tfor (const key of nameKeys) {\n\t\t\t\tif (options[key]) {\n\t\t\t\t\tnormalized += `:${options[key]}`;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn normalized;\n\t}\n\n\t_tryToCreateNewSession(normalizedOptions, normalizedOrigin) {\n\t\tif (!(normalizedOptions in this.queue) || !(normalizedOrigin in this.queue[normalizedOptions])) {\n\t\t\treturn;\n\t\t}\n\n\t\tconst item = this.queue[normalizedOptions][normalizedOrigin];\n\n\t\t// The entry function can be run only once.\n\t\t// BUG: The session may be never created when:\n\t\t// - the first condition is false AND\n\t\t// - this function is never called with the same arguments in the future.\n\t\tif (this._sessionsCount < this.maxSessions && !item.completed) {\n\t\t\titem.completed = true;\n\n\t\t\titem();\n\t\t}\n\t}\n\n\tgetSession(origin, options, listeners) {\n\t\treturn new Promise((resolve, reject) => {\n\t\t\tif (Array.isArray(listeners)) {\n\t\t\t\tlisteners = [...listeners];\n\n\t\t\t\t// Resolve the current promise ASAP, we're just moving the listeners.\n\t\t\t\t// They will be executed at a different time.\n\t\t\t\tresolve();\n\t\t\t} else {\n\t\t\t\tlisteners = [{resolve, reject}];\n\t\t\t}\n\n\t\t\tconst normalizedOptions = this.normalizeOptions(options);\n\t\t\tconst normalizedOrigin = Agent.normalizeOrigin(origin, options && options.servername);\n\n\t\t\tif (normalizedOrigin === undefined) {\n\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\treject(new TypeError('The `origin` argument needs to be a string or an URL object'));\n\t\t\t\t}\n\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (normalizedOptions in this.sessions) {\n\t\t\t\tconst sessions = this.sessions[normalizedOptions];\n\n\t\t\t\tlet maxConcurrentStreams = -1;\n\t\t\t\tlet currentStreamsCount = -1;\n\t\t\t\tlet optimalSession;\n\n\t\t\t\t// We could just do this.sessions[normalizedOptions].find(...) 
but that isn't optimal.\n\t\t\t\t// Additionally, we are looking for session which has biggest current pending streams count.\n\t\t\t\tfor (const session of sessions) {\n\t\t\t\t\tconst sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams;\n\n\t\t\t\t\tif (sessionMaxConcurrentStreams < maxConcurrentStreams) {\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (session[kOriginSet].includes(normalizedOrigin)) {\n\t\t\t\t\t\tconst sessionCurrentStreamsCount = session[kCurrentStreamsCount];\n\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\tsessionCurrentStreamsCount >= sessionMaxConcurrentStreams ||\n\t\t\t\t\t\t\tsession[kGracefullyClosing] ||\n\t\t\t\t\t\t\t// Unfortunately the `close` event isn't called immediately,\n\t\t\t\t\t\t\t// so `session.destroyed` is `true`, but `session.closed` is `false`.\n\t\t\t\t\t\t\tsession.destroyed\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// We only need set this once.\n\t\t\t\t\t\tif (!optimalSession) {\n\t\t\t\t\t\t\tmaxConcurrentStreams = sessionMaxConcurrentStreams;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// We're looking for the session which has biggest current pending stream count,\n\t\t\t\t\t\t// in order to minimalize the amount of active sessions.\n\t\t\t\t\t\tif (sessionCurrentStreamsCount > currentStreamsCount) {\n\t\t\t\t\t\t\toptimalSession = session;\n\t\t\t\t\t\t\tcurrentStreamsCount = sessionCurrentStreamsCount;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (optimalSession) {\n\t\t\t\t\t/* istanbul ignore next: safety check */\n\t\t\t\t\tif (listeners.length !== 1) {\n\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\tconst error = new Error(\n\t\t\t\t\t\t\t\t`Expected the length of listeners to be 1, got ${listeners.length}.\\n` +\n\t\t\t\t\t\t\t\t'Please report this to https://github.com/szmarczak/http2-wrapper/'\n\t\t\t\t\t\t\t);\n\n\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tlisteners[0].resolve(optimalSession);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (normalizedOptions in this.queue) {\n\t\t\t\tif (normalizedOrigin in this.queue[normalizedOptions]) {\n\t\t\t\t\t// There's already an item in the queue, just attach ourselves to it.\n\t\t\t\t\tthis.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners);\n\n\t\t\t\t\t// This shouldn't be executed here.\n\t\t\t\t\t// See the comment inside _tryToCreateNewSession.\n\t\t\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.queue[normalizedOptions] = {};\n\t\t\t}\n\n\t\t\t// The entry must be removed from the queue IMMEDIATELY when:\n\t\t\t// 1. the session connects successfully,\n\t\t\t// 2. an error occurs.\n\t\t\tconst removeFromQueue = () => {\n\t\t\t\t// Our entry can be replaced. 
We cannot remove the new one.\n\t\t\t\tif (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) {\n\t\t\t\t\tdelete this.queue[normalizedOptions][normalizedOrigin];\n\n\t\t\t\t\tif (Object.keys(this.queue[normalizedOptions]).length === 0) {\n\t\t\t\t\t\tdelete this.queue[normalizedOptions];\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// The main logic is here\n\t\t\tconst entry = () => {\n\t\t\t\tconst name = `${normalizedOrigin}:${normalizedOptions}`;\n\t\t\t\tlet receivedSettings = false;\n\n\t\t\t\ttry {\n\t\t\t\t\tconst session = http2.connect(origin, {\n\t\t\t\t\t\tcreateConnection: this.createConnection,\n\t\t\t\t\t\tsettings: this.settings,\n\t\t\t\t\t\tsession: this.tlsSessionCache.get(name),\n\t\t\t\t\t\t...options\n\t\t\t\t\t});\n\t\t\t\t\tsession[kCurrentStreamsCount] = 0;\n\t\t\t\t\tsession[kGracefullyClosing] = false;\n\n\t\t\t\t\tconst isFree = () => session[kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;\n\t\t\t\t\tlet wasFree = true;\n\n\t\t\t\t\tsession.socket.once('session', tlsSession => {\n\t\t\t\t\t\tthis.tlsSessionCache.set(name, tlsSession);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('error', error => {\n\t\t\t\t\t\t// Listeners are empty when the session successfully connected.\n\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// The connection got broken, purge the cache.\n\t\t\t\t\t\tthis.tlsSessionCache.delete(name);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.setTimeout(this.timeout, () => {\n\t\t\t\t\t\t// Terminates all streams owned by this session.\n\t\t\t\t\t\t// TODO: Maybe the streams should have a \"Session timed out\" error?\n\t\t\t\t\t\tsession.destroy();\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('close', () => {\n\t\t\t\t\t\tif (receivedSettings) {\n\t\t\t\t\t\t\t// 1. If it wasn't free then no need to decrease because\n\t\t\t\t\t\t\t// it has been decreased already in session.request().\n\t\t\t\t\t\t\t// 2. 
`stream.once('close')` won't increment the count\n\t\t\t\t\t\t\t// because the session is already closed.\n\t\t\t\t\t\t\tif (wasFree) {\n\t\t\t\t\t\t\t\tthis._freeSessionsCount--;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tthis._sessionsCount--;\n\n\t\t\t\t\t\t\t// This cannot be moved to the stream logic,\n\t\t\t\t\t\t\t// because there may be a session that hadn't made a single request.\n\t\t\t\t\t\t\tconst where = this.sessions[normalizedOptions];\n\t\t\t\t\t\t\twhere.splice(where.indexOf(session), 1);\n\n\t\t\t\t\t\t\tif (where.length === 0) {\n\t\t\t\t\t\t\t\tdelete this.sessions[normalizedOptions];\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t// Broken connection\n\t\t\t\t\t\t\tconst error = new Error('Session closed without receiving a SETTINGS frame');\n\t\t\t\t\t\t\terror.code = 'HTTP2WRAPPER_NOSETTINGS';\n\n\t\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tremoveFromQueue();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// There may be another session awaiting.\n\t\t\t\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t\t\t\t});\n\n\t\t\t\t\t// Iterates over the queue and processes listeners.\n\t\t\t\t\tconst processListeners = () => {\n\t\t\t\t\t\tif (!(normalizedOptions in this.queue) || !isFree()) {\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tfor (const origin of session[kOriginSet]) {\n\t\t\t\t\t\t\tif (origin in this.queue[normalizedOptions]) {\n\t\t\t\t\t\t\t\tconst {listeners} = this.queue[normalizedOptions][origin];\n\n\t\t\t\t\t\t\t\t// Prevents session overloading.\n\t\t\t\t\t\t\t\twhile (listeners.length !== 0 && isFree()) {\n\t\t\t\t\t\t\t\t\t// We assume `resolve(...)` calls `request(...)` *directly*,\n\t\t\t\t\t\t\t\t\t// otherwise the session will get overloaded.\n\t\t\t\t\t\t\t\t\tlisteners.shift().resolve(session);\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tconst where = this.queue[normalizedOptions];\n\t\t\t\t\t\t\t\tif (where[origin].listeners.length === 0) {\n\t\t\t\t\t\t\t\t\tdelete where[origin];\n\n\t\t\t\t\t\t\t\t\tif (Object.keys(where).length === 0) {\n\t\t\t\t\t\t\t\t\t\tdelete this.queue[normalizedOptions];\n\t\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t// We're no longer free, no point in continuing.\n\t\t\t\t\t\t\t\tif (!isFree()) {\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\n\t\t\t\t\t// The Origin Set cannot shrink. 
No need to check if it suddenly became covered by another one.\n\t\t\t\t\tsession.on('origin', () => {\n\t\t\t\t\t\tsession[kOriginSet] = session.originSet;\n\n\t\t\t\t\t\tif (!isFree()) {\n\t\t\t\t\t\t\t// The session is full.\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tprocessListeners();\n\n\t\t\t\t\t\t// Close covered sessions (if possible).\n\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('remoteSettings', () => {\n\t\t\t\t\t\t// Fix Node.js bug preventing the process from exiting\n\t\t\t\t\t\tsession.ref();\n\t\t\t\t\t\tsession.unref();\n\n\t\t\t\t\t\tthis._sessionsCount++;\n\n\t\t\t\t\t\t// The Agent could have been destroyed already.\n\t\t\t\t\t\tif (entry.destroyed) {\n\t\t\t\t\t\t\tconst error = new Error('Agent has been destroyed');\n\n\t\t\t\t\t\t\tfor (const listener of listeners) {\n\t\t\t\t\t\t\t\tlistener.reject(error);\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tsession.destroy();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tsession[kOriginSet] = session.originSet;\n\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tconst where = this.sessions;\n\n\t\t\t\t\t\t\tif (normalizedOptions in where) {\n\t\t\t\t\t\t\t\tconst sessions = where[normalizedOptions];\n\t\t\t\t\t\t\t\tsessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\twhere[normalizedOptions] = [session];\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tthis._freeSessionsCount += 1;\n\t\t\t\t\t\treceivedSettings = true;\n\n\t\t\t\t\t\tthis.emit('session', session);\n\n\t\t\t\t\t\tprocessListeners();\n\t\t\t\t\t\tremoveFromQueue();\n\n\t\t\t\t\t\t// TODO: Close last recently used (or least used?) session\n\t\t\t\t\t\tif (session[kCurrentStreamsCount] === 0 && this._freeSessionsCount > this.maxFreeSessions) {\n\t\t\t\t\t\t\tsession.close();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Check if we haven't managed to execute all listeners.\n\t\t\t\t\t\tif (listeners.length !== 0) {\n\t\t\t\t\t\t\t// Request for a new session with predefined listeners.\n\t\t\t\t\t\t\tthis.getSession(normalizedOrigin, options, listeners);\n\t\t\t\t\t\t\tlisteners.length = 0;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// `session.remoteSettings.maxConcurrentStreams` might get increased\n\t\t\t\t\t\tsession.on('remoteSettings', () => {\n\t\t\t\t\t\t\tprocessListeners();\n\n\t\t\t\t\t\t\t// In case the Origin Set changes\n\t\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t\t});\n\t\t\t\t\t});\n\n\t\t\t\t\t// Shim `session.request()` in order to catch all streams\n\t\t\t\t\tsession[kRequest] = session.request;\n\t\t\t\t\tsession.request = (headers, streamOptions) => {\n\t\t\t\t\t\tif (session[kGracefullyClosing]) {\n\t\t\t\t\t\t\tthrow new Error('The session is gracefully closing. 
No new streams are allowed.');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tconst stream = session[kRequest](headers, streamOptions);\n\n\t\t\t\t\t\t// The process won't exit until the session is closed or all requests are gone.\n\t\t\t\t\t\tsession.ref();\n\n\t\t\t\t\t\t++session[kCurrentStreamsCount];\n\n\t\t\t\t\t\tif (session[kCurrentStreamsCount] === session.remoteSettings.maxConcurrentStreams) {\n\t\t\t\t\t\t\tthis._freeSessionsCount--;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tstream.once('close', () => {\n\t\t\t\t\t\t\twasFree = isFree();\n\n\t\t\t\t\t\t\t--session[kCurrentStreamsCount];\n\n\t\t\t\t\t\t\tif (!session.destroyed && !session.closed) {\n\t\t\t\t\t\t\t\tcloseSessionIfCovered(this.sessions[normalizedOptions], session);\n\n\t\t\t\t\t\t\t\tif (isFree() && !session.closed) {\n\t\t\t\t\t\t\t\t\tif (!wasFree) {\n\t\t\t\t\t\t\t\t\t\tthis._freeSessionsCount++;\n\n\t\t\t\t\t\t\t\t\t\twasFree = true;\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tconst isEmpty = session[kCurrentStreamsCount] === 0;\n\n\t\t\t\t\t\t\t\t\tif (isEmpty) {\n\t\t\t\t\t\t\t\t\t\tsession.unref();\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t\t\tisEmpty &&\n\t\t\t\t\t\t\t\t\t\t(\n\t\t\t\t\t\t\t\t\t\t\tthis._freeSessionsCount > this.maxFreeSessions ||\n\t\t\t\t\t\t\t\t\t\t\tsession[kGracefullyClosing]\n\t\t\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\t\t\tsession.close();\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t\t\t\t\t\tprocessListeners();\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\n\t\t\t\t\t\treturn stream;\n\t\t\t\t\t};\n\t\t\t\t} catch (error) {\n\t\t\t\t\tfor (const listener of listeners) {\n\t\t\t\t\t\tlistener.reject(error);\n\t\t\t\t\t}\n\n\t\t\t\t\tremoveFromQueue();\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tentry.listeners = listeners;\n\t\t\tentry.completed = false;\n\t\t\tentry.destroyed = false;\n\n\t\t\tthis.queue[normalizedOptions][normalizedOrigin] = entry;\n\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t});\n\t}\n\n\trequest(origin, options, headers, streamOptions) {\n\t\treturn new Promise((resolve, reject) => {\n\t\t\tthis.getSession(origin, options, [{\n\t\t\t\treject,\n\t\t\t\tresolve: session => {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tresolve(session.request(headers, streamOptions));\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}]);\n\t\t});\n\t}\n\n\tcreateConnection(origin, options) {\n\t\treturn Agent.connect(origin, options);\n\t}\n\n\tstatic connect(origin, options) {\n\t\toptions.ALPNProtocols = ['h2'];\n\n\t\tconst port = origin.port || 443;\n\t\tconst host = origin.hostname || origin.host;\n\n\t\tif (typeof options.servername === 'undefined') {\n\t\t\toptions.servername = host;\n\t\t}\n\n\t\treturn tls.connect(port, host, options);\n\t}\n\n\tcloseFreeSessions() {\n\t\tfor (const sessions of Object.values(this.sessions)) {\n\t\t\tfor (const session of sessions) {\n\t\t\t\tif (session[kCurrentStreamsCount] === 0) {\n\t\t\t\t\tsession.close();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tdestroy(reason) {\n\t\tfor (const sessions of Object.values(this.sessions)) {\n\t\t\tfor (const session of sessions) {\n\t\t\t\tsession.destroy(reason);\n\t\t\t}\n\t\t}\n\n\t\tfor (const entriesOfAuthority of Object.values(this.queue)) {\n\t\t\tfor (const entry of Object.values(entriesOfAuthority)) {\n\t\t\t\tentry.destroyed = true;\n\t\t\t}\n\t\t}\n\n\t\t// New requests should NOT attach to destroyed sessions\n\t\tthis.queue = 
{};\n\t}\n\n\tget freeSessions() {\n\t\treturn getSessions({agent: this, isFree: true});\n\t}\n\n\tget busySessions() {\n\t\treturn getSessions({agent: this, isFree: false});\n\t}\n}\n\nAgent.kCurrentStreamsCount = kCurrentStreamsCount;\nAgent.kGracefullyClosing = kGracefullyClosing;\n\nmodule.exports = {\n\tAgent,\n\tglobalAgent: new Agent()\n};\n","'use strict';\nconst http = require('http');\nconst https = require('https');\nconst resolveALPN = require('resolve-alpn');\nconst QuickLRU = require('quick-lru');\nconst Http2ClientRequest = require('./client-request');\nconst calculateServerName = require('./utils/calculate-server-name');\nconst urlToOptions = require('./utils/url-to-options');\n\nconst cache = new QuickLRU({maxSize: 100});\nconst queue = new Map();\n\nconst installSocket = (agent, socket, options) => {\n\tsocket._httpMessage = {shouldKeepAlive: true};\n\n\tconst onFree = () => {\n\t\tagent.emit('free', socket, options);\n\t};\n\n\tsocket.on('free', onFree);\n\n\tconst onClose = () => {\n\t\tagent.removeSocket(socket, options);\n\t};\n\n\tsocket.on('close', onClose);\n\n\tconst onRemove = () => {\n\t\tagent.removeSocket(socket, options);\n\t\tsocket.off('close', onClose);\n\t\tsocket.off('free', onFree);\n\t\tsocket.off('agentRemove', onRemove);\n\t};\n\n\tsocket.on('agentRemove', onRemove);\n\n\tagent.emit('free', socket, options);\n};\n\nconst resolveProtocol = async options => {\n\tconst name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`;\n\n\tif (!cache.has(name)) {\n\t\tif (queue.has(name)) {\n\t\t\tconst result = await queue.get(name);\n\t\t\treturn result.alpnProtocol;\n\t\t}\n\n\t\tconst {path, agent} = options;\n\t\toptions.path = options.socketPath;\n\n\t\tconst resultPromise = resolveALPN(options);\n\t\tqueue.set(name, resultPromise);\n\n\t\ttry {\n\t\t\tconst {socket, alpnProtocol} = await resultPromise;\n\t\t\tcache.set(name, alpnProtocol);\n\n\t\t\toptions.path = path;\n\n\t\t\tif (alpnProtocol === 'h2') {\n\t\t\t\t// https://github.com/nodejs/node/issues/33343\n\t\t\t\tsocket.destroy();\n\t\t\t} else {\n\t\t\t\tconst {globalAgent} = https;\n\t\t\t\tconst defaultCreateConnection = https.Agent.prototype.createConnection;\n\n\t\t\t\tif (agent) {\n\t\t\t\t\tif (agent.createConnection === defaultCreateConnection) {\n\t\t\t\t\t\tinstallSocket(agent, socket, options);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tsocket.destroy();\n\t\t\t\t\t}\n\t\t\t\t} else if (globalAgent.createConnection === defaultCreateConnection) {\n\t\t\t\t\tinstallSocket(globalAgent, socket, options);\n\t\t\t\t} else {\n\t\t\t\t\tsocket.destroy();\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tqueue.delete(name);\n\n\t\t\treturn alpnProtocol;\n\t\t} catch (error) {\n\t\t\tqueue.delete(name);\n\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\treturn cache.get(name);\n};\n\nmodule.exports = async (input, options, callback) => {\n\tif (typeof input === 'string' || input instanceof URL) {\n\t\tinput = urlToOptions(new URL(input));\n\t}\n\n\tif (typeof options === 'function') {\n\t\tcallback = options;\n\t\toptions = undefined;\n\t}\n\n\toptions = {\n\t\tALPNProtocols: ['h2', 'http/1.1'],\n\t\t...input,\n\t\t...options,\n\t\tresolveSocket: true\n\t};\n\n\tif (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) {\n\t\tthrow new Error('The `ALPNProtocols` option must be an Array with at least one entry');\n\t}\n\n\toptions.protocol = options.protocol || 'https:';\n\tconst isHttps = options.protocol === 'https:';\n\n\toptions.host = options.hostname || options.host || 
'localhost';\n\toptions.session = options.tlsSession;\n\toptions.servername = options.servername || calculateServerName(options);\n\toptions.port = options.port || (isHttps ? 443 : 80);\n\toptions._defaultAgent = isHttps ? https.globalAgent : http.globalAgent;\n\n\tconst agents = options.agent;\n\n\tif (agents) {\n\t\tif (agents.addRequest) {\n\t\t\tthrow new Error('The `options.agent` object can contain only `http`, `https` or `http2` properties');\n\t\t}\n\n\t\toptions.agent = agents[isHttps ? 'https' : 'http'];\n\t}\n\n\tif (isHttps) {\n\t\tconst protocol = await resolveProtocol(options);\n\n\t\tif (protocol === 'h2') {\n\t\t\tif (agents) {\n\t\t\t\toptions.agent = agents.http2;\n\t\t\t}\n\n\t\t\treturn new Http2ClientRequest(options, callback);\n\t\t}\n\t}\n\n\treturn http.request(options, callback);\n};\n\nmodule.exports.protocolCache = cache;\n","'use strict';\nconst http2 = require('http2');\nconst {Writable} = require('stream');\nconst {Agent, globalAgent} = require('./agent');\nconst IncomingMessage = require('./incoming-message');\nconst urlToOptions = require('./utils/url-to-options');\nconst proxyEvents = require('./utils/proxy-events');\nconst isRequestPseudoHeader = require('./utils/is-request-pseudo-header');\nconst {\n\tERR_INVALID_ARG_TYPE,\n\tERR_INVALID_PROTOCOL,\n\tERR_HTTP_HEADERS_SENT,\n\tERR_INVALID_HTTP_TOKEN,\n\tERR_HTTP_INVALID_HEADER_VALUE,\n\tERR_INVALID_CHAR\n} = require('./utils/errors');\n\nconst {\n\tHTTP2_HEADER_STATUS,\n\tHTTP2_HEADER_METHOD,\n\tHTTP2_HEADER_PATH,\n\tHTTP2_METHOD_CONNECT\n} = http2.constants;\n\nconst kHeaders = Symbol('headers');\nconst kOrigin = Symbol('origin');\nconst kSession = Symbol('session');\nconst kOptions = Symbol('options');\nconst kFlushedHeaders = Symbol('flushedHeaders');\nconst kJobs = Symbol('jobs');\n\nconst isValidHttpToken = /^[\\^`\\-\\w!#$%&*+.|~]+$/;\nconst isInvalidHeaderValue = /[^\\t\\u0020-\\u007E\\u0080-\\u00FF]/;\n\nclass ClientRequest extends Writable {\n\tconstructor(input, options, callback) {\n\t\tsuper({\n\t\t\tautoDestroy: false\n\t\t});\n\n\t\tconst hasInput = typeof input === 'string' || input instanceof URL;\n\t\tif (hasInput) {\n\t\t\tinput = urlToOptions(input instanceof URL ? input : new URL(input));\n\t\t}\n\n\t\tif (typeof options === 'function' || options === undefined) {\n\t\t\t// (options, callback)\n\t\t\tcallback = options;\n\t\t\toptions = hasInput ? 
input : {...input};\n\t\t} else {\n\t\t\t// (input, options, callback)\n\t\t\toptions = {...input, ...options};\n\t\t}\n\n\t\tif (options.h2session) {\n\t\t\tthis[kSession] = options.h2session;\n\t\t} else if (options.agent === false) {\n\t\t\tthis.agent = new Agent({maxFreeSessions: 0});\n\t\t} else if (typeof options.agent === 'undefined' || options.agent === null) {\n\t\t\tif (typeof options.createConnection === 'function') {\n\t\t\t\t// This is a workaround - we don't have to create the session on our own.\n\t\t\t\tthis.agent = new Agent({maxFreeSessions: 0});\n\t\t\t\tthis.agent.createConnection = options.createConnection;\n\t\t\t} else {\n\t\t\t\tthis.agent = globalAgent;\n\t\t\t}\n\t\t} else if (typeof options.agent.request === 'function') {\n\t\t\tthis.agent = options.agent;\n\t\t} else {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('options.agent', ['Agent-like Object', 'undefined', 'false'], options.agent);\n\t\t}\n\n\t\tif (options.protocol && options.protocol !== 'https:') {\n\t\t\tthrow new ERR_INVALID_PROTOCOL(options.protocol, 'https:');\n\t\t}\n\n\t\tconst port = options.port || options.defaultPort || (this.agent && this.agent.defaultPort) || 443;\n\t\tconst host = options.hostname || options.host || 'localhost';\n\n\t\t// Don't enforce the origin via options. It may be changed in an Agent.\n\t\tdelete options.hostname;\n\t\tdelete options.host;\n\t\tdelete options.port;\n\n\t\tconst {timeout} = options;\n\t\toptions.timeout = undefined;\n\n\t\tthis[kHeaders] = Object.create(null);\n\t\tthis[kJobs] = [];\n\n\t\tthis.socket = null;\n\t\tthis.connection = null;\n\n\t\tthis.method = options.method || 'GET';\n\t\tthis.path = options.path;\n\n\t\tthis.res = null;\n\t\tthis.aborted = false;\n\t\tthis.reusedSocket = false;\n\n\t\tif (options.headers) {\n\t\t\tfor (const [header, value] of Object.entries(options.headers)) {\n\t\t\t\tthis.setHeader(header, value);\n\t\t\t}\n\t\t}\n\n\t\tif (options.auth && !('authorization' in this[kHeaders])) {\n\t\t\tthis[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64');\n\t\t}\n\n\t\toptions.session = options.tlsSession;\n\t\toptions.path = options.socketPath;\n\n\t\tthis[kOptions] = options;\n\n\t\t// Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field.\n\t\tif (port === 443) {\n\t\t\tthis[kOrigin] = `https://${host}`;\n\n\t\t\tif (!(':authority' in this[kHeaders])) {\n\t\t\t\tthis[kHeaders][':authority'] = host;\n\t\t\t}\n\t\t} else {\n\t\t\tthis[kOrigin] = `https://${host}:${port}`;\n\n\t\t\tif (!(':authority' in this[kHeaders])) {\n\t\t\t\tthis[kHeaders][':authority'] = `${host}:${port}`;\n\t\t\t}\n\t\t}\n\n\t\tif (timeout) {\n\t\t\tthis.setTimeout(timeout);\n\t\t}\n\n\t\tif (callback) {\n\t\t\tthis.once('response', callback);\n\t\t}\n\n\t\tthis[kFlushedHeaders] = false;\n\t}\n\n\tget method() {\n\t\treturn this[kHeaders][HTTP2_HEADER_METHOD];\n\t}\n\n\tset method(value) {\n\t\tif (value) {\n\t\t\tthis[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase();\n\t\t}\n\t}\n\n\tget path() {\n\t\treturn this[kHeaders][HTTP2_HEADER_PATH];\n\t}\n\n\tset path(value) {\n\t\tif (value) {\n\t\t\tthis[kHeaders][HTTP2_HEADER_PATH] = value;\n\t\t}\n\t}\n\n\tget _mustNotHaveABody() {\n\t\treturn this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE';\n\t}\n\n\t_write(chunk, encoding, callback) {\n\t\t// https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156\n\t\tif 
(this._mustNotHaveABody) {\n\t\t\tcallback(new Error('The GET, HEAD and DELETE methods must NOT have a body'));\n\t\t\t/* istanbul ignore next: Node.js 12 throws directly */\n\t\t\treturn;\n\t\t}\n\n\t\tthis.flushHeaders();\n\n\t\tconst callWrite = () => this._request.write(chunk, encoding, callback);\n\t\tif (this._request) {\n\t\t\tcallWrite();\n\t\t} else {\n\t\t\tthis[kJobs].push(callWrite);\n\t\t}\n\t}\n\n\t_final(callback) {\n\t\tif (this.destroyed) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis.flushHeaders();\n\n\t\tconst callEnd = () => {\n\t\t\t// For GET, HEAD and DELETE\n\t\t\tif (this._mustNotHaveABody) {\n\t\t\t\tcallback();\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tthis._request.end(callback);\n\t\t};\n\n\t\tif (this._request) {\n\t\t\tcallEnd();\n\t\t} else {\n\t\t\tthis[kJobs].push(callEnd);\n\t\t}\n\t}\n\n\tabort() {\n\t\tif (this.res && this.res.complete) {\n\t\t\treturn;\n\t\t}\n\n\t\tif (!this.aborted) {\n\t\t\tprocess.nextTick(() => this.emit('abort'));\n\t\t}\n\n\t\tthis.aborted = true;\n\n\t\tthis.destroy();\n\t}\n\n\t_destroy(error, callback) {\n\t\tif (this.res) {\n\t\t\tthis.res._dump();\n\t\t}\n\n\t\tif (this._request) {\n\t\t\tthis._request.destroy();\n\t\t}\n\n\t\tcallback(error);\n\t}\n\n\tasync flushHeaders() {\n\t\tif (this[kFlushedHeaders] || this.destroyed) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis[kFlushedHeaders] = true;\n\n\t\tconst isConnectMethod = this.method === HTTP2_METHOD_CONNECT;\n\n\t\t// The real magic is here\n\t\tconst onStream = stream => {\n\t\t\tthis._request = stream;\n\n\t\t\tif (this.destroyed) {\n\t\t\t\tstream.destroy();\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Forwards `timeout`, `continue`, `close` and `error` events to this instance.\n\t\t\tif (!isConnectMethod) {\n\t\t\t\tproxyEvents(stream, this, ['timeout', 'continue', 'close', 'error']);\n\t\t\t}\n\n\t\t\t// Wait for the `finish` event. 
We don't want to emit the `response` event\n\t\t\t// before `request.end()` is called.\n\t\t\tconst waitForEnd = fn => {\n\t\t\t\treturn (...args) => {\n\t\t\t\t\tif (!this.writable && !this.destroyed) {\n\t\t\t\t\t\tfn(...args);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthis.once('finish', () => {\n\t\t\t\t\t\t\tfn(...args);\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t};\n\t\t\t};\n\n\t\t\t// This event tells we are ready to listen for the data.\n\t\t\tstream.once('response', waitForEnd((headers, flags, rawHeaders) => {\n\t\t\t\t// If we were to emit raw request stream, it would be as fast as the native approach.\n\t\t\t\t// Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it).\n\t\t\t\tconst response = new IncomingMessage(this.socket, stream.readableHighWaterMark);\n\t\t\t\tthis.res = response;\n\n\t\t\t\tresponse.req = this;\n\t\t\t\tresponse.statusCode = headers[HTTP2_HEADER_STATUS];\n\t\t\t\tresponse.headers = headers;\n\t\t\t\tresponse.rawHeaders = rawHeaders;\n\n\t\t\t\tresponse.once('end', () => {\n\t\t\t\t\tif (this.aborted) {\n\t\t\t\t\t\tresponse.aborted = true;\n\t\t\t\t\t\tresponse.emit('aborted');\n\t\t\t\t\t} else {\n\t\t\t\t\t\tresponse.complete = true;\n\n\t\t\t\t\t\t// Has no effect, just be consistent with the Node.js behavior\n\t\t\t\t\t\tresponse.socket = null;\n\t\t\t\t\t\tresponse.connection = null;\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\tif (isConnectMethod) {\n\t\t\t\t\tresponse.upgrade = true;\n\n\t\t\t\t\t// The HTTP1 API says the socket is detached here,\n\t\t\t\t\t// but we can't do that so we pass the original HTTP2 request.\n\t\t\t\t\tif (this.emit('connect', response, stream, Buffer.alloc(0))) {\n\t\t\t\t\t\tthis.emit('close');\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// No listeners attached, destroy the original request.\n\t\t\t\t\t\tstream.destroy();\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// Forwards data\n\t\t\t\t\tstream.on('data', chunk => {\n\t\t\t\t\t\tif (!response._dumped && !response.push(chunk)) {\n\t\t\t\t\t\t\tstream.pause();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\n\t\t\t\t\tstream.once('end', () => {\n\t\t\t\t\t\tresponse.push(null);\n\t\t\t\t\t});\n\n\t\t\t\t\tif (!this.emit('response', response)) {\n\t\t\t\t\t\t// No listeners attached, dump the response.\n\t\t\t\t\t\tresponse._dump();\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}));\n\n\t\t\t// Emits `information` event\n\t\t\tstream.once('headers', waitForEnd(\n\t\t\t\theaders => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]})\n\t\t\t));\n\n\t\t\tstream.once('trailers', waitForEnd((trailers, flags, rawTrailers) => {\n\t\t\t\tconst {res} = this;\n\n\t\t\t\t// Assigns trailers to the response object.\n\t\t\t\tres.trailers = trailers;\n\t\t\t\tres.rawTrailers = rawTrailers;\n\t\t\t}));\n\n\t\t\tconst {socket} = stream.session;\n\t\t\tthis.socket = socket;\n\t\t\tthis.connection = socket;\n\n\t\t\tfor (const job of this[kJobs]) {\n\t\t\t\tjob();\n\t\t\t}\n\n\t\t\tthis.emit('socket', this.socket);\n\t\t};\n\n\t\t// Makes a HTTP2 request\n\t\tif (this[kSession]) {\n\t\t\ttry {\n\t\t\t\tonStream(this[kSession].request(this[kHeaders]));\n\t\t\t} catch (error) {\n\t\t\t\tthis.emit('error', error);\n\t\t\t}\n\t\t} else {\n\t\t\tthis.reusedSocket = true;\n\n\t\t\ttry {\n\t\t\t\tonStream(await this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]));\n\t\t\t} catch (error) {\n\t\t\t\tthis.emit('error', error);\n\t\t\t}\n\t\t}\n\t}\n\n\tgetHeader(name) {\n\t\tif (typeof name !== 'string') {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('name', 'string', 
name);\n\t\t}\n\n\t\treturn this[kHeaders][name.toLowerCase()];\n\t}\n\n\tget headersSent() {\n\t\treturn this[kFlushedHeaders];\n\t}\n\n\tremoveHeader(name) {\n\t\tif (typeof name !== 'string') {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('name', 'string', name);\n\t\t}\n\n\t\tif (this.headersSent) {\n\t\t\tthrow new ERR_HTTP_HEADERS_SENT('remove');\n\t\t}\n\n\t\tdelete this[kHeaders][name.toLowerCase()];\n\t}\n\n\tsetHeader(name, value) {\n\t\tif (this.headersSent) {\n\t\t\tthrow new ERR_HTTP_HEADERS_SENT('set');\n\t\t}\n\n\t\tif (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) {\n\t\t\tthrow new ERR_INVALID_HTTP_TOKEN('Header name', name);\n\t\t}\n\n\t\tif (typeof value === 'undefined') {\n\t\t\tthrow new ERR_HTTP_INVALID_HEADER_VALUE(value, name);\n\t\t}\n\n\t\tif (isInvalidHeaderValue.test(value)) {\n\t\t\tthrow new ERR_INVALID_CHAR('header content', name);\n\t\t}\n\n\t\tthis[kHeaders][name.toLowerCase()] = value;\n\t}\n\n\tsetNoDelay() {\n\t\t// HTTP2 sockets cannot be malformed, do nothing.\n\t}\n\n\tsetSocketKeepAlive() {\n\t\t// HTTP2 sockets cannot be malformed, do nothing.\n\t}\n\n\tsetTimeout(ms, callback) {\n\t\tconst applyTimeout = () => this._request.setTimeout(ms, callback);\n\n\t\tif (this._request) {\n\t\t\tapplyTimeout();\n\t\t} else {\n\t\t\tthis[kJobs].push(applyTimeout);\n\t\t}\n\n\t\treturn this;\n\t}\n\n\tget maxHeadersCount() {\n\t\tif (!this.destroyed && this._request) {\n\t\t\treturn this._request.session.localSettings.maxHeaderListSize;\n\t\t}\n\n\t\treturn undefined;\n\t}\n\n\tset maxHeadersCount(_value) {\n\t\t// Updating HTTP2 settings would affect all requests, do nothing.\n\t}\n}\n\nmodule.exports = ClientRequest;\n","'use strict';\nconst {Readable} = require('stream');\n\nclass IncomingMessage extends Readable {\n\tconstructor(socket, highWaterMark) {\n\t\tsuper({\n\t\t\thighWaterMark,\n\t\t\tautoDestroy: false\n\t\t});\n\n\t\tthis.statusCode = null;\n\t\tthis.statusMessage = '';\n\t\tthis.httpVersion = '2.0';\n\t\tthis.httpVersionMajor = 2;\n\t\tthis.httpVersionMinor = 0;\n\t\tthis.headers = {};\n\t\tthis.trailers = {};\n\t\tthis.req = null;\n\n\t\tthis.aborted = false;\n\t\tthis.complete = false;\n\t\tthis.upgrade = null;\n\n\t\tthis.rawHeaders = [];\n\t\tthis.rawTrailers = [];\n\n\t\tthis.socket = socket;\n\t\tthis.connection = socket;\n\n\t\tthis._dumped = false;\n\t}\n\n\t_destroy(error) {\n\t\tthis.req._request.destroy(error);\n\t}\n\n\tsetTimeout(ms, callback) {\n\t\tthis.req.setTimeout(ms, callback);\n\t\treturn this;\n\t}\n\n\t_dump() {\n\t\tif (!this._dumped) {\n\t\t\tthis._dumped = true;\n\n\t\t\tthis.removeAllListeners('data');\n\t\t\tthis.resume();\n\t\t}\n\t}\n\n\t_read() {\n\t\tif (this.req) {\n\t\t\tthis.req._request.resume();\n\t\t}\n\t}\n}\n\nmodule.exports = IncomingMessage;\n","'use strict';\nconst http2 = require('http2');\nconst agent = require('./agent');\nconst ClientRequest = require('./client-request');\nconst IncomingMessage = require('./incoming-message');\nconst auto = require('./auto');\n\nconst request = (url, options, callback) => {\n\treturn new ClientRequest(url, options, callback);\n};\n\nconst get = (url, options, callback) => {\n\t// eslint-disable-next-line unicorn/prevent-abbreviations\n\tconst req = new ClientRequest(url, options, callback);\n\treq.end();\n\n\treturn req;\n};\n\nmodule.exports = {\n\t...http2,\n\tClientRequest,\n\tIncomingMessage,\n\t...agent,\n\trequest,\n\tget,\n\tauto\n};\n","'use strict';\nconst net = require('net');\n/* istanbul ignore file: 
https://github.com/nodejs/node/blob/v13.0.1/lib/_http_agent.js */\n\nmodule.exports = options => {\n\tlet servername = options.host;\n\tconst hostHeader = options.headers && options.headers.host;\n\n\tif (hostHeader) {\n\t\tif (hostHeader.startsWith('[')) {\n\t\t\tconst index = hostHeader.indexOf(']');\n\t\t\tif (index === -1) {\n\t\t\t\tservername = hostHeader;\n\t\t\t} else {\n\t\t\t\tservername = hostHeader.slice(1, -1);\n\t\t\t}\n\t\t} else {\n\t\t\tservername = hostHeader.split(':', 1)[0];\n\t\t}\n\t}\n\n\tif (net.isIP(servername)) {\n\t\treturn '';\n\t}\n\n\treturn servername;\n};\n","'use strict';\n/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */\n\nconst makeError = (Base, key, getMessage) => {\n\tmodule.exports[key] = class NodeError extends Base {\n\t\tconstructor(...args) {\n\t\t\tsuper(typeof getMessage === 'string' ? getMessage : getMessage(args));\n\t\t\tthis.name = `${super.name} [${key}]`;\n\t\t\tthis.code = key;\n\t\t}\n\t};\n};\n\nmakeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => {\n\tconst type = args[0].includes('.') ? 'property' : 'argument';\n\n\tlet valid = args[1];\n\tconst isManyTypes = Array.isArray(valid);\n\n\tif (isManyTypes) {\n\t\tvalid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`;\n\t}\n\n\treturn `The \"${args[0]}\" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_PROTOCOL', args => {\n\treturn `Protocol \"${args[0]}\" not supported. Expected \"${args[1]}\"`;\n});\n\nmakeError(Error, 'ERR_HTTP_HEADERS_SENT', args => {\n\treturn `Cannot ${args[0]} headers after they are sent to the client`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => {\n\treturn `${args[0]} must be a valid HTTP token [${args[1]}]`;\n});\n\nmakeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => {\n\treturn `Invalid value \"${args[0]} for header \"${args[1]}\"`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_CHAR', args => {\n\treturn `Invalid character in ${args[0]} [${args[1]}]`;\n});\n","'use strict';\n\nmodule.exports = header => {\n\tswitch (header) {\n\t\tcase ':method':\n\t\tcase ':scheme':\n\t\tcase ':authority':\n\t\tcase ':path':\n\t\t\treturn true;\n\t\tdefault:\n\t\t\treturn false;\n\t}\n};\n","'use strict';\n\nmodule.exports = (from, to, events) => {\n\tfor (const event of events) {\n\t\tfrom.on(event, (...args) => to.emit(event, ...args));\n\t}\n};\n","'use strict';\n/* istanbul ignore file: https://github.com/nodejs/node/blob/a91293d4d9ab403046ab5eb022332e4e3d249bd3/lib/internal/url.js#L1257 */\n\nmodule.exports = url => {\n\tconst options = {\n\t\tprotocol: url.protocol,\n\t\thostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') ? 
url.hostname.slice(1, -1) : url.hostname,\n\t\thost: url.host,\n\t\thash: url.hash,\n\t\tsearch: url.search,\n\t\tpathname: url.pathname,\n\t\thref: url.href,\n\t\tpath: `${url.pathname || ''}${url.search || ''}`\n\t};\n\n\tif (typeof url.port === 'string' && url.port.length !== 0) {\n\t\toptions.port = Number(url.port);\n\t}\n\n\tif (url.username || url.password) {\n\t\toptions.auth = `${url.username || ''}:${url.password || ''}`;\n\t}\n\n\treturn options;\n};\n","/*!\n * is-extglob \n *\n * Copyright (c) 2014-2016, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\nmodule.exports = function isExtglob(str) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n var match;\n while ((match = /(\\\\).|([@?!+*]\\(.*\\))/g.exec(str))) {\n if (match[2]) return true;\n str = str.slice(match.index + match[0].length);\n }\n\n return false;\n};\n","/*!\n * is-glob \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nvar isExtglob = require('is-extglob');\nvar chars = { '{': '}', '(': ')', '[': ']'};\nvar strictRegex = /\\\\(.)|(^!|\\*|[\\].+)]\\?|\\[[^\\\\\\]]+\\]|\\{[^\\\\}]+\\}|\\(\\?[:!=][^\\\\)]+\\)|\\([^|]+\\|[^\\\\)]+\\))/;\nvar relaxedRegex = /\\\\(.)|(^!|[*?{}()[\\]]|\\(\\?)/;\n\nmodule.exports = function isGlob(str, options) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n if (isExtglob(str)) {\n return true;\n }\n\n var regex = strictRegex;\n var match;\n\n // optionally relax regex\n if (options && options.strict === false) {\n regex = relaxedRegex;\n }\n\n while ((match = regex.exec(str))) {\n if (match[2]) return true;\n var idx = match.index + match[0].length;\n\n // if an open bracket/brace/paren is escaped,\n // set the index to the next closing character\n var open = match[1];\n var close = open ? chars[open] : null;\n if (open && close) {\n var n = str.indexOf(close, idx);\n if (n !== -1) {\n idx = n + 1;\n }\n }\n\n str = str.slice(idx);\n }\n return false;\n};\n","//TODO: handle reviver/dehydrate function like normal\n//and handle indentation, like normal.\n//if anyone needs this... please send pull request.\n\nexports.stringify = function stringify (o) {\n if('undefined' == typeof o) return o\n\n if(o && Buffer.isBuffer(o))\n return JSON.stringify(':base64:' + o.toString('base64'))\n\n if(o && o.toJSON)\n o = o.toJSON()\n\n if(o && 'object' === typeof o) {\n var s = ''\n var array = Array.isArray(o)\n s = array ? '[' : '{'\n var first = true\n\n for(var k in o) {\n var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k])\n if(Object.hasOwnProperty.call(o, k) && !ignore) {\n if(!first)\n s += ','\n first = false\n if (array) {\n if(o[k] == undefined)\n s += 'null'\n else\n s += stringify(o[k])\n } else if (o[k] !== void(0)) {\n s += stringify(k) + ':' + stringify(o[k])\n }\n }\n }\n\n s += array ? ']' : '}'\n\n return s\n } else if ('string' === typeof o) {\n return JSON.stringify(/^:/.test(o) ? ':' + o : o)\n } else if ('undefined' === typeof o) {\n return 'null';\n } else\n return JSON.stringify(o)\n}\n\nexports.parse = function (s) {\n return JSON.parse(s, function (key, value) {\n if('string' === typeof value) {\n if(/^:base64:/.test(value))\n return Buffer.from(value.substring(8), 'base64')\n else\n return /^:/.test(value) ? 
value.substring(1) : value \n }\n return value\n })\n}\n",null,"'use strict';\nmodule.exports = object => {\n\tconst result = {};\n\n\tfor (const [key, value] of Object.entries(object)) {\n\t\tresult[key.toLowerCase()] = value;\n\t}\n\n\treturn result;\n};\n","'use strict'\n/*\n * merge2\n * https://github.com/teambition/merge2\n *\n * Copyright (c) 2014-2020 Teambition\n * Licensed under the MIT license.\n */\nconst Stream = require('stream')\nconst PassThrough = Stream.PassThrough\nconst slice = Array.prototype.slice\n\nmodule.exports = merge2\n\nfunction merge2 () {\n const streamsQueue = []\n const args = slice.call(arguments)\n let merging = false\n let options = args[args.length - 1]\n\n if (options && !Array.isArray(options) && options.pipe == null) {\n args.pop()\n } else {\n options = {}\n }\n\n const doEnd = options.end !== false\n const doPipeError = options.pipeError === true\n if (options.objectMode == null) {\n options.objectMode = true\n }\n if (options.highWaterMark == null) {\n options.highWaterMark = 64 * 1024\n }\n const mergedStream = PassThrough(options)\n\n function addStream () {\n for (let i = 0, len = arguments.length; i < len; i++) {\n streamsQueue.push(pauseStreams(arguments[i], options))\n }\n mergeStream()\n return this\n }\n\n function mergeStream () {\n if (merging) {\n return\n }\n merging = true\n\n let streams = streamsQueue.shift()\n if (!streams) {\n process.nextTick(endStream)\n return\n }\n if (!Array.isArray(streams)) {\n streams = [streams]\n }\n\n let pipesCount = streams.length + 1\n\n function next () {\n if (--pipesCount > 0) {\n return\n }\n merging = false\n mergeStream()\n }\n\n function pipe (stream) {\n function onend () {\n stream.removeListener('merge2UnpipeEnd', onend)\n stream.removeListener('end', onend)\n if (doPipeError) {\n stream.removeListener('error', onerror)\n }\n next()\n }\n function onerror (err) {\n mergedStream.emit('error', err)\n }\n // skip ended stream\n if (stream._readableState.endEmitted) {\n return next()\n }\n\n stream.on('merge2UnpipeEnd', onend)\n stream.on('end', onend)\n\n if (doPipeError) {\n stream.on('error', onerror)\n }\n\n stream.pipe(mergedStream, { end: false })\n // compatible for old stream\n stream.resume()\n }\n\n for (let i = 0; i < streams.length; i++) {\n pipe(streams[i])\n }\n\n next()\n }\n\n function endStream () {\n merging = false\n // emit 'queueDrain' when all streams merged.\n mergedStream.emit('queueDrain')\n if (doEnd) {\n mergedStream.end()\n }\n }\n\n mergedStream.setMaxListeners(0)\n mergedStream.add = addStream\n mergedStream.on('unpipe', function (stream) {\n stream.emit('merge2UnpipeEnd')\n })\n\n if (args.length) {\n addStream.apply(null, args)\n }\n return mergedStream\n}\n\n// check and pause streams for pipe.\nfunction pauseStreams (streams, options) {\n if (!Array.isArray(streams)) {\n // Backwards-compat with old-style streams\n if (!streams._readableState && streams.pipe) {\n streams = streams.pipe(PassThrough(options))\n }\n if (!streams._readableState || !streams.pause || !streams.pipe) {\n throw new Error('Only readable stream can be merged.')\n }\n streams.pause()\n } else {\n for (let i = 0, len = streams.length; i < len; i++) {\n streams[i] = pauseStreams(streams[i], options)\n }\n }\n return streams\n}\n","'use strict';\n\n// We define these manually to ensure they're always copied\n// even if they would move up the prototype chain\n// https://nodejs.org/api/http.html#http_class_http_incomingmessage\nconst knownProps = 
[\n\t'destroy',\n\t'setTimeout',\n\t'socket',\n\t'headers',\n\t'trailers',\n\t'rawHeaders',\n\t'statusCode',\n\t'httpVersion',\n\t'httpVersionMinor',\n\t'httpVersionMajor',\n\t'rawTrailers',\n\t'statusMessage'\n];\n\nmodule.exports = (fromStream, toStream) => {\n\tconst fromProps = new Set(Object.keys(fromStream).concat(knownProps));\n\n\tfor (const prop of fromProps) {\n\t\t// Don't overwrite existing properties\n\t\tif (prop in toStream) {\n\t\t\tcontinue;\n\t\t}\n\n\t\ttoStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];\n\t}\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","'use strict';\n// TODO: Use the `URL` global when targeting Node.js 10\nconst URLParser = typeof URL === 'undefined' ? require('url').URL : URL;\n\n// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs\nconst DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';\nconst DATA_URL_DEFAULT_CHARSET = 'us-ascii';\n\nconst testParameter = (name, filters) => {\n\treturn filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);\n};\n\nconst normalizeDataURL = (urlString, {stripHash}) => {\n\tconst parts = urlString.match(/^data:(.*?),(.*?)(?:#(.*))?$/);\n\n\tif (!parts) {\n\t\tthrow new Error(`Invalid URL: ${urlString}`);\n\t}\n\n\tconst mediaType = parts[1].split(';');\n\tconst body = parts[2];\n\tconst hash = stripHash ? 
'' : parts[3];\n\n\tlet base64 = false;\n\n\tif (mediaType[mediaType.length - 1] === 'base64') {\n\t\tmediaType.pop();\n\t\tbase64 = true;\n\t}\n\n\t// Lowercase MIME type\n\tconst mimeType = (mediaType.shift() || '').toLowerCase();\n\tconst attributes = mediaType\n\t\t.map(attribute => {\n\t\t\tlet [key, value = ''] = attribute.split('=').map(string => string.trim());\n\n\t\t\t// Lowercase `charset`\n\t\t\tif (key === 'charset') {\n\t\t\t\tvalue = value.toLowerCase();\n\n\t\t\t\tif (value === DATA_URL_DEFAULT_CHARSET) {\n\t\t\t\t\treturn '';\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn `${key}${value ? `=${value}` : ''}`;\n\t\t})\n\t\t.filter(Boolean);\n\n\tconst normalizedMediaType = [\n\t\t...attributes\n\t];\n\n\tif (base64) {\n\t\tnormalizedMediaType.push('base64');\n\t}\n\n\tif (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {\n\t\tnormalizedMediaType.unshift(mimeType);\n\t}\n\n\treturn `data:${normalizedMediaType.join(';')},${base64 ? body.trim() : body}${hash ? `#${hash}` : ''}`;\n};\n\nconst normalizeUrl = (urlString, options) => {\n\toptions = {\n\t\tdefaultProtocol: 'http:',\n\t\tnormalizeProtocol: true,\n\t\tforceHttp: false,\n\t\tforceHttps: false,\n\t\tstripAuthentication: true,\n\t\tstripHash: false,\n\t\tstripWWW: true,\n\t\tremoveQueryParameters: [/^utm_\\w+/i],\n\t\tremoveTrailingSlash: true,\n\t\tremoveDirectoryIndex: false,\n\t\tsortQueryParameters: true,\n\t\t...options\n\t};\n\n\t// TODO: Remove this at some point in the future\n\tif (Reflect.has(options, 'normalizeHttps')) {\n\t\tthrow new Error('options.normalizeHttps is renamed to options.forceHttp');\n\t}\n\n\tif (Reflect.has(options, 'normalizeHttp')) {\n\t\tthrow new Error('options.normalizeHttp is renamed to options.forceHttps');\n\t}\n\n\tif (Reflect.has(options, 'stripFragment')) {\n\t\tthrow new Error('options.stripFragment is renamed to options.stripHash');\n\t}\n\n\turlString = urlString.trim();\n\n\t// Data URL\n\tif (/^data:/i.test(urlString)) {\n\t\treturn normalizeDataURL(urlString, options);\n\t}\n\n\tconst hasRelativeProtocol = urlString.startsWith('//');\n\tconst isRelativeUrl = !hasRelativeProtocol && /^\\.*\\//.test(urlString);\n\n\t// Prepend protocol\n\tif (!isRelativeUrl) {\n\t\turlString = urlString.replace(/^(?!(?:\\w+:)?\\/\\/)|^\\/\\//, options.defaultProtocol);\n\t}\n\n\tconst urlObj = new URLParser(urlString);\n\n\tif (options.forceHttp && options.forceHttps) {\n\t\tthrow new Error('The `forceHttp` and `forceHttps` options cannot be used together');\n\t}\n\n\tif (options.forceHttp && urlObj.protocol === 'https:') {\n\t\turlObj.protocol = 'http:';\n\t}\n\n\tif (options.forceHttps && urlObj.protocol === 'http:') {\n\t\turlObj.protocol = 'https:';\n\t}\n\n\t// Remove auth\n\tif (options.stripAuthentication) {\n\t\turlObj.username = '';\n\t\turlObj.password = '';\n\t}\n\n\t// Remove hash\n\tif (options.stripHash) {\n\t\turlObj.hash = '';\n\t}\n\n\t// Remove duplicate slashes if not preceded by a protocol\n\tif (urlObj.pathname) {\n\t\t// TODO: Use the following instead when targeting Node.js 10\n\t\t// `urlObj.pathname = urlObj.pathname.replace(/(? 
<!https?:)\\/{2,}/g, '/');`\n\t\turlObj.pathname = urlObj.pathname.replace(/((?!:).|^)\\/{2,}/g, (_, p1) => 
{\n\t\t\tif (/^(?!\\/)/g.test(p1)) {\n\t\t\t\treturn `${p1}/`;\n\t\t\t}\n\n\t\t\treturn '/';\n\t\t});\n\t}\n\n\t// Decode URI octets\n\tif (urlObj.pathname) {\n\t\turlObj.pathname = decodeURI(urlObj.pathname);\n\t}\n\n\t// Remove directory index\n\tif (options.removeDirectoryIndex === true) {\n\t\toptions.removeDirectoryIndex = [/^index\\.[a-z]+$/];\n\t}\n\n\tif (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {\n\t\tlet pathComponents = urlObj.pathname.split('/');\n\t\tconst lastComponent = pathComponents[pathComponents.length - 1];\n\n\t\tif (testParameter(lastComponent, options.removeDirectoryIndex)) {\n\t\t\tpathComponents = pathComponents.slice(0, pathComponents.length - 1);\n\t\t\turlObj.pathname = pathComponents.slice(1).join('/') + '/';\n\t\t}\n\t}\n\n\tif (urlObj.hostname) {\n\t\t// Remove trailing dot\n\t\turlObj.hostname = urlObj.hostname.replace(/\\.$/, '');\n\n\t\t// Remove `www.`\n\t\tif (options.stripWWW && /^www\\.([a-z\\-\\d]{2,63})\\.([a-z.]{2,5})$/.test(urlObj.hostname)) {\n\t\t\t// Each label should be max 63 at length (min: 2).\n\t\t\t// The extension should be max 5 at length (min: 2).\n\t\t\t// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names\n\t\t\turlObj.hostname = urlObj.hostname.replace(/^www\\./, '');\n\t\t}\n\t}\n\n\t// Remove query unwanted parameters\n\tif (Array.isArray(options.removeQueryParameters)) {\n\t\tfor (const key of [...urlObj.searchParams.keys()]) {\n\t\t\tif (testParameter(key, options.removeQueryParameters)) {\n\t\t\t\turlObj.searchParams.delete(key);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Sort query parameters\n\tif (options.sortQueryParameters) {\n\t\turlObj.searchParams.sort();\n\t}\n\n\tif (options.removeTrailingSlash) {\n\t\turlObj.pathname = urlObj.pathname.replace(/\\/$/, '');\n\t}\n\n\t// Take advantage of many of the Node `url` normalizations\n\turlString = urlObj.toString();\n\n\t// Remove ending `/`\n\tif ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '') {\n\t\turlString = urlString.replace(/\\/$/, '');\n\t}\n\n\t// Restore relative protocol, if applicable\n\tif (hasRelativeProtocol && !options.normalizeProtocol) {\n\t\turlString = urlString.replace(/^http:\\/\\//, '//');\n\t}\n\n\t// Remove http/https\n\tif (options.stripProtocol) {\n\t\turlString = urlString.replace(/^(?:https?:)?\\/\\//, '');\n\t}\n\n\treturn urlString;\n};\n\nmodule.exports = normalizeUrl;\n// TODO: Remove this for the next major release\nmodule.exports.default = normalizeUrl;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\n\nclass CancelError extends Error {\n\tconstructor(reason) {\n\t\tsuper(reason 
|| 'Promise was canceled');\n\t\tthis.name = 'CancelError';\n\t}\n\n\tget isCanceled() {\n\t\treturn true;\n\t}\n}\n\nclass PCancelable {\n\tstatic fn(userFn) {\n\t\treturn (...arguments_) => {\n\t\t\treturn new PCancelable((resolve, reject, onCancel) => {\n\t\t\t\targuments_.push(onCancel);\n\t\t\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\t\t\tuserFn(...arguments_).then(resolve, reject);\n\t\t\t});\n\t\t};\n\t}\n\n\tconstructor(executor) {\n\t\tthis._cancelHandlers = [];\n\t\tthis._isPending = true;\n\t\tthis._isCanceled = false;\n\t\tthis._rejectOnCancel = true;\n\n\t\tthis._promise = new Promise((resolve, reject) => {\n\t\t\tthis._reject = reject;\n\n\t\t\tconst onResolve = value => {\n\t\t\t\tthis._isPending = false;\n\t\t\t\tresolve(value);\n\t\t\t};\n\n\t\t\tconst onReject = error => {\n\t\t\t\tthis._isPending = false;\n\t\t\t\treject(error);\n\t\t\t};\n\n\t\t\tconst onCancel = handler => {\n\t\t\t\tif (!this._isPending) {\n\t\t\t\t\tthrow new Error('The `onCancel` handler was attached after the promise settled.');\n\t\t\t\t}\n\n\t\t\t\tthis._cancelHandlers.push(handler);\n\t\t\t};\n\n\t\t\tObject.defineProperties(onCancel, {\n\t\t\t\tshouldReject: {\n\t\t\t\t\tget: () => this._rejectOnCancel,\n\t\t\t\t\tset: boolean => {\n\t\t\t\t\t\tthis._rejectOnCancel = boolean;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\n\t\t\treturn executor(onResolve, onReject, onCancel);\n\t\t});\n\t}\n\n\tthen(onFulfilled, onRejected) {\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\treturn this._promise.then(onFulfilled, onRejected);\n\t}\n\n\tcatch(onRejected) {\n\t\treturn this._promise.catch(onRejected);\n\t}\n\n\tfinally(onFinally) {\n\t\treturn this._promise.finally(onFinally);\n\t}\n\n\tcancel(reason) {\n\t\tif (!this._isPending || this._isCanceled) {\n\t\t\treturn;\n\t\t}\n\n\t\tif (this._cancelHandlers.length > 0) {\n\t\t\ttry {\n\t\t\t\tfor (const handler of this._cancelHandlers) {\n\t\t\t\t\thandler();\n\t\t\t\t}\n\t\t\t} catch (error) {\n\t\t\t\tthis._reject(error);\n\t\t\t}\n\t\t}\n\n\t\tthis._isCanceled = true;\n\t\tif (this._rejectOnCancel) {\n\t\t\tthis._reject(new CancelError(reason));\n\t\t}\n\t}\n\n\tget isCanceled() {\n\t\treturn this._isCanceled;\n\t}\n}\n\nObject.setPrototypeOf(PCancelable.prototype, Promise.prototype);\n\nmodule.exports = PCancelable;\nmodule.exports.CancelError = CancelError;\n","'use strict';\n\nmodule.exports = require('./lib/picomatch');\n","'use strict';\n\nconst path = require('path');\nconst WIN_SLASH = '\\\\\\\\/';\nconst WIN_NO_SLASH = `[^${WIN_SLASH}]`;\n\n/**\n * Posix glob regex\n */\n\nconst DOT_LITERAL = '\\\\.';\nconst PLUS_LITERAL = '\\\\+';\nconst QMARK_LITERAL = '\\\\?';\nconst SLASH_LITERAL = '\\\\/';\nconst ONE_CHAR = '(?=.)';\nconst QMARK = '[^/]';\nconst END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;\nconst START_ANCHOR = `(?:^|${SLASH_LITERAL})`;\nconst DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;\nconst NO_DOT = `(?!${DOT_LITERAL})`;\nconst NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;\nconst NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;\nconst NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;\nconst QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;\nconst STAR = `${QMARK}*?`;\n\nconst POSIX_CHARS = {\n DOT_LITERAL,\n PLUS_LITERAL,\n QMARK_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n QMARK,\n END_ANCHOR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n};\n\n/**\n * Windows glob regex\n */\n\nconst WINDOWS_CHARS = {\n ...POSIX_CHARS,\n\n SLASH_LITERAL: `[${WIN_SLASH}]`,\n 
QMARK: WIN_NO_SLASH,\n STAR: `${WIN_NO_SLASH}*?`,\n DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,\n NO_DOT: `(?!${DOT_LITERAL})`,\n NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,\n NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n QMARK_NO_DOT: `[^.${WIN_SLASH}]`,\n START_ANCHOR: `(?:^|[${WIN_SLASH}])`,\n END_ANCHOR: `(?:[${WIN_SLASH}]|$)`\n};\n\n/**\n * POSIX Bracket Regex\n */\n\nconst POSIX_REGEX_SOURCE = {\n alnum: 'a-zA-Z0-9',\n alpha: 'a-zA-Z',\n ascii: '\\\\x00-\\\\x7F',\n blank: ' \\\\t',\n cntrl: '\\\\x00-\\\\x1F\\\\x7F',\n digit: '0-9',\n graph: '\\\\x21-\\\\x7E',\n lower: 'a-z',\n print: '\\\\x20-\\\\x7E ',\n punct: '\\\\-!\"#$%&\\'()\\\\*+,./:;<=>?@[\\\\]^_`{|}~',\n space: ' \\\\t\\\\r\\\\n\\\\v\\\\f',\n upper: 'A-Z',\n word: 'A-Za-z0-9_',\n xdigit: 'A-Fa-f0-9'\n};\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n POSIX_REGEX_SOURCE,\n\n // regular expressions\n REGEX_BACKSLASH: /\\\\(?![*+?^${}(|)[\\]])/g,\n REGEX_NON_SPECIAL_CHARS: /^[^@![\\].,$*+?^{}()|\\\\/]+/,\n REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\\]]/,\n REGEX_SPECIAL_CHARS_BACKREF: /(\\\\?)((\\W)(\\3*))/g,\n REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\\]])/g,\n REGEX_REMOVE_BACKSLASH: /(?:\\[.*?[^\\\\]\\]|\\\\(?=.))/g,\n\n // Replace globs with equivalent patterns to reduce parsing time.\n REPLACEMENTS: {\n '***': '*',\n '**/**': '**',\n '**/**/**': '**'\n },\n\n // Digits\n CHAR_0: 48, /* 0 */\n CHAR_9: 57, /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 65, /* A */\n CHAR_LOWERCASE_A: 97, /* a */\n CHAR_UPPERCASE_Z: 90, /* Z */\n CHAR_LOWERCASE_Z: 122, /* z */\n\n CHAR_LEFT_PARENTHESES: 40, /* ( */\n CHAR_RIGHT_PARENTHESES: 41, /* ) */\n\n CHAR_ASTERISK: 42, /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: 38, /* & */\n CHAR_AT: 64, /* @ */\n CHAR_BACKWARD_SLASH: 92, /* \\ */\n CHAR_CARRIAGE_RETURN: 13, /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */\n CHAR_COLON: 58, /* : */\n CHAR_COMMA: 44, /* , */\n CHAR_DOT: 46, /* . */\n CHAR_DOUBLE_QUOTE: 34, /* \" */\n CHAR_EQUAL: 61, /* = */\n CHAR_EXCLAMATION_MARK: 33, /* ! */\n CHAR_FORM_FEED: 12, /* \\f */\n CHAR_FORWARD_SLASH: 47, /* / */\n CHAR_GRAVE_ACCENT: 96, /* ` */\n CHAR_HASH: 35, /* # */\n CHAR_HYPHEN_MINUS: 45, /* - */\n CHAR_LEFT_ANGLE_BRACKET: 60, /* < */\n CHAR_LEFT_CURLY_BRACE: 123, /* { */\n CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */\n CHAR_LINE_FEED: 10, /* \\n */\n CHAR_NO_BREAK_SPACE: 160, /* \\u00A0 */\n CHAR_PERCENT: 37, /* % */\n CHAR_PLUS: 43, /* + */\n CHAR_QUESTION_MARK: 63, /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */\n CHAR_RIGHT_CURLY_BRACE: 125, /* } */\n CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */\n CHAR_SEMICOLON: 59, /* ; */\n CHAR_SINGLE_QUOTE: 39, /* ' */\n CHAR_SPACE: 32, /* */\n CHAR_TAB: 9, /* \\t */\n CHAR_UNDERSCORE: 95, /* _ */\n CHAR_VERTICAL_LINE: 124, /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \\uFEFF */\n\n SEP: path.sep,\n\n /**\n * Create EXTGLOB_CHARS\n */\n\n extglobChars(chars) {\n return {\n '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },\n '?': { type: 'qmark', open: '(?:', close: ')?' },\n '+': { type: 'plus', open: '(?:', close: ')+' },\n '*': { type: 'star', open: '(?:', close: ')*' },\n '@': { type: 'at', open: '(?:', close: ')' }\n };\n },\n\n /**\n * Create GLOB_CHARS\n */\n\n globChars(win32) {\n return win32 === true ? 
WINDOWS_CHARS : POSIX_CHARS;\n }\n};\n","'use strict';\n\nconst constants = require('./constants');\nconst utils = require('./utils');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n POSIX_REGEX_SOURCE,\n REGEX_NON_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_BACKREF,\n REPLACEMENTS\n} = constants;\n\n/**\n * Helpers\n */\n\nconst expandRange = (args, options) => {\n if (typeof options.expandRange === 'function') {\n return options.expandRange(...args, options);\n }\n\n args.sort();\n const value = `[${args.join('-')}]`;\n\n try {\n /* eslint-disable-next-line no-new */\n new RegExp(value);\n } catch (ex) {\n return args.map(v => utils.escapeRegex(v)).join('..');\n }\n\n return value;\n};\n\n/**\n * Create the message for a syntax error\n */\n\nconst syntaxError = (type, char) => {\n return `Missing ${type}: \"${char}\" - use \"\\\\\\\\${char}\" to match literal characters`;\n};\n\n/**\n * Parse the given input string.\n * @param {String} input\n * @param {Object} options\n * @return {Object}\n */\n\nconst parse = (input, options) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n input = REPLACEMENTS[input] || input;\n\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n\n let len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n const bos = { type: 'bos', value: '', output: opts.prepend || '' };\n const tokens = [bos];\n\n const capture = opts.capture ? '' : '?:';\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const PLATFORM_CHARS = constants.globChars(win32);\n const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);\n\n const {\n DOT_LITERAL,\n PLUS_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n } = PLATFORM_CHARS;\n\n const globstar = (opts) => {\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const nodot = opts.dot ? '' : NO_DOT;\n const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;\n let star = opts.bash === true ? globstar(opts) : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n // minimatch options support\n if (typeof opts.noext === 'boolean') {\n opts.noextglob = opts.noext;\n }\n\n const state = {\n input,\n index: -1,\n start: 0,\n dot: opts.dot === true,\n consumed: '',\n output: '',\n prefix: '',\n backtrack: false,\n negated: false,\n brackets: 0,\n braces: 0,\n parens: 0,\n quotes: 0,\n globstar: false,\n tokens\n };\n\n input = utils.removePrefix(input, state);\n len = input.length;\n\n const extglobs = [];\n const braces = [];\n const stack = [];\n let prev = bos;\n let value;\n\n /**\n * Tokenizing helpers\n */\n\n const eos = () => state.index === len - 1;\n const peek = state.peek = (n = 1) => input[state.index + n];\n const advance = state.advance = () => input[++state.index];\n const remaining = () => input.slice(state.index + 1);\n const consume = (value = '', num = 0) => {\n state.consumed += value;\n state.index += num;\n };\n const append = token => {\n state.output += token.output != null ? token.output : token.value;\n consume(token.value);\n };\n\n const negate = () => {\n let count = 1;\n\n while (peek() === '!' 
&& (peek(2) !== '(' || peek(3) === '?')) {\n advance();\n state.start++;\n count++;\n }\n\n if (count % 2 === 0) {\n return false;\n }\n\n state.negated = true;\n state.start++;\n return true;\n };\n\n const increment = type => {\n state[type]++;\n stack.push(type);\n };\n\n const decrement = type => {\n state[type]--;\n stack.pop();\n };\n\n /**\n * Push tokens onto the tokens array. This helper speeds up\n * tokenizing by 1) helping us avoid backtracking as much as possible,\n * and 2) helping us avoid creating extra tokens when consecutive\n * characters are plain text. This improves performance and simplifies\n * lookbehinds.\n */\n\n const push = tok => {\n if (prev.type === 'globstar') {\n const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');\n const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));\n\n if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {\n state.output = state.output.slice(0, -prev.output.length);\n prev.type = 'star';\n prev.value = '*';\n prev.output = star;\n state.output += prev.output;\n }\n }\n\n if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {\n extglobs[extglobs.length - 1].inner += tok.value;\n }\n\n if (tok.value || tok.output) append(tok);\n if (prev && prev.type === 'text' && tok.type === 'text') {\n prev.value += tok.value;\n prev.output = (prev.output || '') + tok.value;\n return;\n }\n\n tok.prev = prev;\n tokens.push(tok);\n prev = tok;\n };\n\n const extglobOpen = (type, value) => {\n const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };\n\n token.prev = prev;\n token.parens = state.parens;\n token.output = state.output;\n const output = (opts.capture ? '(' : '') + token.open;\n\n increment('parens');\n push({ type, value, output: state.output ? '' : ONE_CHAR });\n push({ type: 'paren', extglob: true, value: advance(), output });\n extglobs.push(token);\n };\n\n const extglobClose = token => {\n let output = token.close + (opts.capture ? ')' : '');\n\n if (token.type === 'negate') {\n let extglobStar = star;\n\n if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {\n extglobStar = globstar(opts);\n }\n\n if (extglobStar !== star || eos() || /^\\)+$/.test(remaining())) {\n output = token.close = `)$))${extglobStar}`;\n }\n\n if (token.prev.type === 'bos' && eos()) {\n state.negatedExtglob = true;\n }\n }\n\n push({ type: 'paren', extglob: true, value, output });\n decrement('parens');\n };\n\n /**\n * Fast paths\n */\n\n if (opts.fastpaths !== false && !/(^[*!]|[/()[\\]{}\"])/.test(input)) {\n let backslashes = false;\n\n let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {\n if (first === '\\\\') {\n backslashes = true;\n return m;\n }\n\n if (first === '?') {\n if (esc) {\n return esc + first + (rest ? QMARK.repeat(rest.length) : '');\n }\n if (index === 0) {\n return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');\n }\n return QMARK.repeat(chars.length);\n }\n\n if (first === '.') {\n return DOT_LITERAL.repeat(chars.length);\n }\n\n if (first === '*') {\n if (esc) {\n return esc + first + (rest ? star : '');\n }\n return star;\n }\n return esc ? m : `\\\\${m}`;\n });\n\n if (backslashes === true) {\n if (opts.unescape === true) {\n output = output.replace(/\\\\/g, '');\n } else {\n output = output.replace(/\\\\+/g, m => {\n return m.length % 2 === 0 ? '\\\\\\\\' : (m ? 
'\\\\' : '');\n });\n }\n }\n\n if (output === input && opts.contains === true) {\n state.output = input;\n return state;\n }\n\n state.output = utils.wrapOutput(output, state, options);\n return state;\n }\n\n /**\n * Tokenize input until we reach end-of-string\n */\n\n while (!eos()) {\n value = advance();\n\n if (value === '\\u0000') {\n continue;\n }\n\n /**\n * Escaped characters\n */\n\n if (value === '\\\\') {\n const next = peek();\n\n if (next === '/' && opts.bash !== true) {\n continue;\n }\n\n if (next === '.' || next === ';') {\n continue;\n }\n\n if (!next) {\n value += '\\\\';\n push({ type: 'text', value });\n continue;\n }\n\n // collapse slashes to reduce potential for exploits\n const match = /^\\\\+/.exec(remaining());\n let slashes = 0;\n\n if (match && match[0].length > 2) {\n slashes = match[0].length;\n state.index += slashes;\n if (slashes % 2 !== 0) {\n value += '\\\\';\n }\n }\n\n if (opts.unescape === true) {\n value = advance() || '';\n } else {\n value += advance() || '';\n }\n\n if (state.brackets === 0) {\n push({ type: 'text', value });\n continue;\n }\n }\n\n /**\n * If we're inside a regex character class, continue\n * until we reach the closing bracket.\n */\n\n if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {\n if (opts.posix !== false && value === ':') {\n const inner = prev.value.slice(1);\n if (inner.includes('[')) {\n prev.posix = true;\n\n if (inner.includes(':')) {\n const idx = prev.value.lastIndexOf('[');\n const pre = prev.value.slice(0, idx);\n const rest = prev.value.slice(idx + 2);\n const posix = POSIX_REGEX_SOURCE[rest];\n if (posix) {\n prev.value = pre + posix;\n state.backtrack = true;\n advance();\n\n if (!bos.output && tokens.indexOf(prev) === 1) {\n bos.output = ONE_CHAR;\n }\n continue;\n }\n }\n }\n }\n\n if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {\n value = `\\\\${value}`;\n }\n\n if (value === ']' && (prev.value === '[' || prev.value === '[^')) {\n value = `\\\\${value}`;\n }\n\n if (opts.posix === true && value === '!' && prev.value === '[') {\n value = '^';\n }\n\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * If we're inside a quoted string, continue\n * until we reach the closing double quote.\n */\n\n if (state.quotes === 1 && value !== '\"') {\n value = utils.escapeRegex(value);\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * Double quotes\n */\n\n if (value === '\"') {\n state.quotes = state.quotes === 1 ? 0 : 1;\n if (opts.keepQuotes === true) {\n push({ type: 'text', value });\n }\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === '(') {\n increment('parens');\n push({ type: 'paren', value });\n continue;\n }\n\n if (value === ')') {\n if (state.parens === 0 && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '('));\n }\n\n const extglob = extglobs[extglobs.length - 1];\n if (extglob && state.parens === extglob.parens + 1) {\n extglobClose(extglobs.pop());\n continue;\n }\n\n push({ type: 'paren', value, output: state.parens ? 
')' : '\\\\)' });\n decrement('parens');\n continue;\n }\n\n /**\n * Square brackets\n */\n\n if (value === '[') {\n if (opts.nobracket === true || !remaining().includes(']')) {\n if (opts.nobracket !== true && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('closing', ']'));\n }\n\n value = `\\\\${value}`;\n } else {\n increment('brackets');\n }\n\n push({ type: 'bracket', value });\n continue;\n }\n\n if (value === ']') {\n if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n if (state.brackets === 0) {\n if (opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '['));\n }\n\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n decrement('brackets');\n\n const prevValue = prev.value.slice(1);\n if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {\n value = `/${value}`;\n }\n\n prev.value += value;\n append({ value });\n\n // when literal brackets are explicitly disabled\n // assume we should match with a regex character class\n if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {\n continue;\n }\n\n const escaped = utils.escapeRegex(prev.value);\n state.output = state.output.slice(0, -prev.value.length);\n\n // when literal brackets are explicitly enabled\n // assume we should escape the brackets to match literal characters\n if (opts.literalBrackets === true) {\n state.output += escaped;\n prev.value = escaped;\n continue;\n }\n\n // when the user specifies nothing, try to match both\n prev.value = `(${capture}${escaped}|${prev.value})`;\n state.output += prev.value;\n continue;\n }\n\n /**\n * Braces\n */\n\n if (value === '{' && opts.nobrace !== true) {\n increment('braces');\n\n const open = {\n type: 'brace',\n value,\n output: '(',\n outputIndex: state.output.length,\n tokensIndex: state.tokens.length\n };\n\n braces.push(open);\n push(open);\n continue;\n }\n\n if (value === '}') {\n const brace = braces[braces.length - 1];\n\n if (opts.nobrace === true || !brace) {\n push({ type: 'text', value, output: value });\n continue;\n }\n\n let output = ')';\n\n if (brace.dots === true) {\n const arr = tokens.slice();\n const range = [];\n\n for (let i = arr.length - 1; i >= 0; i--) {\n tokens.pop();\n if (arr[i].type === 'brace') {\n break;\n }\n if (arr[i].type !== 'dots') {\n range.unshift(arr[i].value);\n }\n }\n\n output = expandRange(range, opts);\n state.backtrack = true;\n }\n\n if (brace.comma !== true && brace.dots !== true) {\n const out = state.output.slice(0, brace.outputIndex);\n const toks = state.tokens.slice(brace.tokensIndex);\n brace.value = brace.output = '\\\\{';\n value = output = '\\\\}';\n state.output = out;\n for (const t of toks) {\n state.output += (t.output || t.value);\n }\n }\n\n push({ type: 'brace', value, output });\n decrement('braces');\n braces.pop();\n continue;\n }\n\n /**\n * Pipes\n */\n\n if (value === '|') {\n if (extglobs.length > 0) {\n extglobs[extglobs.length - 1].conditions++;\n }\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Commas\n */\n\n if (value === ',') {\n let output = value;\n\n const brace = braces[braces.length - 1];\n if (brace && stack[stack.length - 1] === 'braces') {\n brace.comma = true;\n output = '|';\n }\n\n push({ type: 'comma', value, output });\n continue;\n }\n\n /**\n * Slashes\n */\n\n if (value === '/') {\n // if the beginning of the glob is \"./\", advance the start\n // 
to the current index, and don't add the \"./\" characters\n // to the state. This greatly simplifies lookbehinds when\n // checking for BOS characters like \"!\" and \".\" (not \"./\")\n if (prev.type === 'dot' && state.index === state.start + 1) {\n state.start = state.index + 1;\n state.consumed = '';\n state.output = '';\n tokens.pop();\n prev = bos; // reset \"prev\" to the first token\n continue;\n }\n\n push({ type: 'slash', value, output: SLASH_LITERAL });\n continue;\n }\n\n /**\n * Dots\n */\n\n if (value === '.') {\n if (state.braces > 0 && prev.type === 'dot') {\n if (prev.value === '.') prev.output = DOT_LITERAL;\n const brace = braces[braces.length - 1];\n prev.type = 'dots';\n prev.output += value;\n prev.value += value;\n brace.dots = true;\n continue;\n }\n\n if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {\n push({ type: 'text', value, output: DOT_LITERAL });\n continue;\n }\n\n push({ type: 'dot', value, output: DOT_LITERAL });\n continue;\n }\n\n /**\n * Question marks\n */\n\n if (value === '?') {\n const isGroup = prev && prev.value === '(';\n if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('qmark', value);\n continue;\n }\n\n if (prev && prev.type === 'paren') {\n const next = peek();\n let output = value;\n\n if (next === '<' && !utils.supportsLookbehinds()) {\n throw new Error('Node.js v10 or higher is required for regex lookbehinds');\n }\n\n if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\\w+>)/.test(remaining()))) {\n output = `\\\\${value}`;\n }\n\n push({ type: 'text', value, output });\n continue;\n }\n\n if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {\n push({ type: 'qmark', value, output: QMARK_NO_DOT });\n continue;\n }\n\n push({ type: 'qmark', value, output: QMARK });\n continue;\n }\n\n /**\n * Exclamation\n */\n\n if (value === '!') {\n if (opts.noextglob !== true && peek() === '(') {\n if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) {\n extglobOpen('negate', value);\n continue;\n }\n }\n\n if (opts.nonegate !== true && state.index === 0) {\n negate();\n continue;\n }\n }\n\n /**\n * Plus\n */\n\n if (value === '+') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('plus', value);\n continue;\n }\n\n if ((prev && prev.value === '(') || opts.regex === false) {\n push({ type: 'plus', value, output: PLUS_LITERAL });\n continue;\n }\n\n if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {\n push({ type: 'plus', value });\n continue;\n }\n\n push({ type: 'plus', value: PLUS_LITERAL });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value === '@') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n push({ type: 'at', extglob: true, value, output: '' });\n continue;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value !== '*') {\n if (value === '$' || value === '^') {\n value = `\\\\${value}`;\n }\n\n const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());\n if (match) {\n value += match[0];\n state.index += match[0].length;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Stars\n */\n\n if (prev && (prev.type === 'globstar' || prev.star === true)) {\n prev.type = 'star';\n prev.star = true;\n prev.value += value;\n prev.output = star;\n state.backtrack = true;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n let rest = remaining();\n if (opts.noextglob !== true && /^\\([^?]/.test(rest)) {\n extglobOpen('star', value);\n continue;\n }\n\n if (prev.type === 'star') {\n if (opts.noglobstar === true) {\n consume(value);\n continue;\n }\n\n const prior = prev.prev;\n const before = prior.prev;\n const isStart = prior.type === 'slash' || prior.type === 'bos';\n const afterStar = before && (before.type === 'star' || before.type === 'globstar');\n\n if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');\n const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');\n if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n // strip consecutive `/**/`\n while (rest.slice(0, 3) === '/**') {\n const after = input[state.index + 4];\n if (after && after !== '/') {\n break;\n }\n rest = rest.slice(3);\n consume('/**', 3);\n }\n\n if (prior.type === 'bos' && eos()) {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = globstar(opts);\n state.output = prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');\n prev.value += value;\n state.globstar = true;\n state.output += prior.output + prev.output;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {\n const end = rest[1] !== void 0 ? 
'|$' : '';\n\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;\n prev.value += value;\n\n state.output += prior.output + prev.output;\n state.globstar = true;\n\n consume(value + advance());\n\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n if (prior.type === 'bos' && rest[0] === '/') {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;\n state.output = prev.output;\n state.globstar = true;\n consume(value + advance());\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n // remove single star from output\n state.output = state.output.slice(0, -prev.output.length);\n\n // reset previous token to globstar\n prev.type = 'globstar';\n prev.output = globstar(opts);\n prev.value += value;\n\n // reset output with globstar\n state.output += prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n const token = { type: 'star', value, output: star };\n\n if (opts.bash === true) {\n token.output = '.*?';\n if (prev.type === 'bos' || prev.type === 'slash') {\n token.output = nodot + token.output;\n }\n push(token);\n continue;\n }\n\n if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {\n token.output = value;\n push(token);\n continue;\n }\n\n if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {\n if (prev.type === 'dot') {\n state.output += NO_DOT_SLASH;\n prev.output += NO_DOT_SLASH;\n\n } else if (opts.dot === true) {\n state.output += NO_DOTS_SLASH;\n prev.output += NO_DOTS_SLASH;\n\n } else {\n state.output += nodot;\n prev.output += nodot;\n }\n\n if (peek() !== '*') {\n state.output += ONE_CHAR;\n prev.output += ONE_CHAR;\n }\n }\n\n push(token);\n }\n\n while (state.brackets > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));\n state.output = utils.escapeLast(state.output, '[');\n decrement('brackets');\n }\n\n while (state.parens > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));\n state.output = utils.escapeLast(state.output, '(');\n decrement('parens');\n }\n\n while (state.braces > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));\n state.output = utils.escapeLast(state.output, '{');\n decrement('braces');\n }\n\n if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {\n push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });\n }\n\n // rebuild the output if we had to backtrack at any point\n if (state.backtrack === true) {\n state.output = '';\n\n for (const token of state.tokens) {\n state.output += token.output != null ? token.output : token.value;\n\n if (token.suffix) {\n state.output += token.suffix;\n }\n }\n }\n\n return state;\n};\n\n/**\n * Fast paths for creating regular expressions for common glob patterns.\n * This can significantly speed up processing and has very little downside\n * impact when none of the fast paths match.\n */\n\nparse.fastpaths = (input, options) => {\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n const len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n input = REPLACEMENTS[input] || input;\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const {\n DOT_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOTS_SLASH,\n STAR,\n START_ANCHOR\n } = constants.globChars(win32);\n\n const nodot = opts.dot ? NO_DOTS : NO_DOT;\n const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;\n const capture = opts.capture ? '' : '?:';\n const state = { negated: false, prefix: '' };\n let star = opts.bash === true ? '.*?' : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n const globstar = (opts) => {\n if (opts.noglobstar === true) return star;\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const create = str => {\n switch (str) {\n case '*':\n return `${nodot}${ONE_CHAR}${star}`;\n\n case '.*':\n return `${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*.*':\n return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*/*':\n return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;\n\n case '**':\n return nodot + globstar(opts);\n\n case '**/*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;\n\n case '**/*.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '**/.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n default: {\n const match = /^(.*?)\\.(\\w+)$/.exec(str);\n if (!match) return;\n\n const source = create(match[1]);\n if (!source) return;\n\n return source + DOT_LITERAL + match[2];\n }\n }\n };\n\n const output = utils.removePrefix(input, state);\n let source = create(output);\n\n if (source && opts.strictSlashes !== true) {\n source += `${SLASH_LITERAL}?`;\n }\n\n return source;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst path = require('path');\nconst scan = require('./scan');\nconst parse = require('./parse');\nconst utils = require('./utils');\nconst constants = require('./constants');\nconst isObject = val => val && typeof val === 'object' && !Array.isArray(val);\n\n/**\n * Creates a matcher function from one or more glob patterns. The\n * returned function takes a string to match as its first argument,\n * and returns true if the string is a match. 
The returned matcher\n * function also takes a boolean as the second argument that, when true,\n * returns an object with additional information.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch(glob[, options]);\n *\n * const isMatch = picomatch('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @name picomatch\n * @param {String|Array} `globs` One or more glob patterns.\n * @param {Object=} `options`\n * @return {Function=} Returns a matcher function.\n * @api public\n */\n\nconst picomatch = (glob, options, returnState = false) => {\n if (Array.isArray(glob)) {\n const fns = glob.map(input => picomatch(input, options, returnState));\n const arrayMatcher = str => {\n for (const isMatch of fns) {\n const state = isMatch(str);\n if (state) return state;\n }\n return false;\n };\n return arrayMatcher;\n }\n\n const isState = isObject(glob) && glob.tokens && glob.input;\n\n if (glob === '' || (typeof glob !== 'string' && !isState)) {\n throw new TypeError('Expected pattern to be a non-empty string');\n }\n\n const opts = options || {};\n const posix = utils.isWindows(options);\n const regex = isState\n ? picomatch.compileRe(glob, options)\n : picomatch.makeRe(glob, options, false, true);\n\n const state = regex.state;\n delete regex.state;\n\n let isIgnored = () => false;\n if (opts.ignore) {\n const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };\n isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);\n }\n\n const matcher = (input, returnObject = false) => {\n const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });\n const result = { glob, state, regex, posix, input, output, match, isMatch };\n\n if (typeof opts.onResult === 'function') {\n opts.onResult(result);\n }\n\n if (isMatch === false) {\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (isIgnored(input)) {\n if (typeof opts.onIgnore === 'function') {\n opts.onIgnore(result);\n }\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (typeof opts.onMatch === 'function') {\n opts.onMatch(result);\n }\n return returnObject ? result : true;\n };\n\n if (returnState) {\n matcher.state = state;\n }\n\n return matcher;\n};\n\n/**\n * Test `input` with the given `regex`. This is used by the main\n * `picomatch()` function to test the input string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.test(input, regex[, options]);\n *\n * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\\/([^/]*?))$/));\n * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp} `regex`\n * @return {Object} Returns an object with matching info.\n * @api public\n */\n\npicomatch.test = (input, regex, options, { glob, posix } = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected input to be a string');\n }\n\n if (input === '') {\n return { isMatch: false, output: '' };\n }\n\n const opts = options || {};\n const format = opts.format || (posix ? utils.toPosixSlashes : null);\n let match = input === glob;\n let output = (match && format) ? format(input) : input;\n\n if (match === false) {\n output = format ? 
format(input) : input;\n match = output === glob;\n }\n\n if (match === false || opts.capture === true) {\n if (opts.matchBase === true || opts.basename === true) {\n match = picomatch.matchBase(input, regex, options, posix);\n } else {\n match = regex.exec(output);\n }\n }\n\n return { isMatch: Boolean(match), match, output };\n};\n\n/**\n * Match the basename of a filepath.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.matchBase(input, glob[, options]);\n * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).\n * @return {Boolean}\n * @api public\n */\n\npicomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {\n const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);\n return regex.test(path.basename(input));\n};\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.isMatch(string, patterns[, options]);\n *\n * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String|Array} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\npicomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const result = picomatch.parse(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as a regex source string.\n * @api public\n */\n\npicomatch.parse = (pattern, options) => {\n if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));\n return parse(pattern, { ...options, fastpaths: false });\n};\n\n/**\n * Scan a glob pattern to separate the pattern into segments.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.scan(input[, options]);\n *\n * const result = picomatch.scan('!./foo/*.js');\n * console.log(result);\n * { prefix: '!./',\n * input: '!./foo/*.js',\n * start: 3,\n * base: 'foo',\n * glob: '*.js',\n * isBrace: false,\n * isBracket: false,\n * isGlob: true,\n * isExtglob: false,\n * isGlobstar: false,\n * negated: true }\n * ```\n * @param {String} `input` Glob pattern to scan.\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\npicomatch.scan = (input, options) => scan(input, options);\n\n/**\n * Create a regular expression from a parsed glob pattern.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const state = picomatch.parse('*.js');\n * // picomatch.compileRe(state[, options]);\n *\n * console.log(picomatch.compileRe(state));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `state` The object returned from the `.parse` method.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\npicomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {\n if 
(returnOutput === true) {\n return parsed.output;\n }\n\n const opts = options || {};\n const prepend = opts.contains ? '' : '^';\n const append = opts.contains ? '' : '$';\n\n let source = `${prepend}(?:${parsed.output})${append}`;\n if (parsed && parsed.negated === true) {\n source = `^(?!${source}).*$`;\n }\n\n const regex = picomatch.toRegex(source, options);\n if (returnState === true) {\n regex.state = parsed;\n }\n\n return regex;\n};\n\npicomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {\n if (!input || typeof input !== 'string') {\n throw new TypeError('Expected a non-empty string');\n }\n\n const opts = options || {};\n let parsed = { negated: false, fastpaths: true };\n let prefix = '';\n let output;\n\n if (input.startsWith('./')) {\n input = input.slice(2);\n prefix = parsed.prefix = './';\n }\n\n if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {\n output = parse.fastpaths(input, options);\n }\n\n if (output === undefined) {\n parsed = parse(input, options);\n parsed.prefix = prefix + (parsed.prefix || '');\n } else {\n parsed.output = output;\n }\n\n return picomatch.compileRe(parsed, options, returnOutput, returnState);\n};\n\n/**\n * Create a regular expression from the given regex source string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.toRegex(source[, options]);\n *\n * const { output } = picomatch.parse('*.js');\n * console.log(picomatch.toRegex(output));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `source` Regular expression source string.\n * @param {Object} `options`\n * @return {RegExp}\n * @api public\n */\n\npicomatch.toRegex = (source, options) => {\n try {\n const opts = options || {};\n return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));\n } catch (err) {\n if (options && options.debug === true) throw err;\n return /$^/;\n }\n};\n\n/**\n * Picomatch constants.\n * @return {Object}\n */\n\npicomatch.constants = constants;\n\n/**\n * Expose \"picomatch\"\n */\n\nmodule.exports = picomatch;\n","'use strict';\n\nconst utils = require('./utils');\nconst {\n CHAR_ASTERISK, /* * */\n CHAR_AT, /* @ */\n CHAR_BACKWARD_SLASH, /* \\ */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_EXCLAMATION_MARK, /* ! */\n CHAR_FORWARD_SLASH, /* / */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_PLUS, /* + */\n CHAR_QUESTION_MARK, /* ? */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_RIGHT_SQUARE_BRACKET /* ] */\n} = require('./constants');\n\nconst isPathSeparator = code => {\n return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;\n};\n\nconst depth = token => {\n if (token.isPrefix !== true) {\n token.depth = token.isGlobstar ? 
Infinity : 1;\n }\n};\n\n/**\n * Quickly scans a glob pattern and returns an object with a handful of\n * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),\n * `glob` (the actual pattern), and `negated` (true if the path starts with `!`).\n *\n * ```js\n * const pm = require('picomatch');\n * console.log(pm.scan('foo/bar/*.js'));\n * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {Object} Returns an object with tokens and regex source string.\n * @api public\n */\n\nconst scan = (input, options) => {\n const opts = options || {};\n\n const length = input.length - 1;\n const scanToEnd = opts.parts === true || opts.scanToEnd === true;\n const slashes = [];\n const tokens = [];\n const parts = [];\n\n let str = input;\n let index = -1;\n let start = 0;\n let lastIndex = 0;\n let isBrace = false;\n let isBracket = false;\n let isGlob = false;\n let isExtglob = false;\n let isGlobstar = false;\n let braceEscaped = false;\n let backslashes = false;\n let negated = false;\n let finished = false;\n let braces = 0;\n let prev;\n let code;\n let token = { value: '', depth: 0, isGlob: false };\n\n const eos = () => index >= length;\n const peek = () => str.charCodeAt(index + 1);\n const advance = () => {\n prev = code;\n return str.charCodeAt(++index);\n };\n\n while (index < length) {\n code = advance();\n let next;\n\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braceEscaped = true;\n }\n continue;\n }\n\n if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n continue;\n }\n\n if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (braceEscaped !== true && code === CHAR_COMMA) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_RIGHT_CURLY_BRACE) {\n braces--;\n\n if (braces === 0) {\n braceEscaped = false;\n isBrace = token.isBrace = true;\n finished = true;\n break;\n }\n }\n }\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_FORWARD_SLASH) {\n slashes.push(index);\n tokens.push(token);\n token = { value: '', depth: 0, isGlob: false };\n\n if (finished === true) continue;\n if (prev === CHAR_DOT && index === (start + 1)) {\n start += 2;\n continue;\n }\n\n lastIndex = index + 1;\n continue;\n }\n\n if (opts.noext !== true) {\n const isExtglobChar = code === CHAR_PLUS\n || code === CHAR_AT\n || code === CHAR_ASTERISK\n || code === CHAR_QUESTION_MARK\n || code === CHAR_EXCLAMATION_MARK;\n\n if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n isExtglob = token.isExtglob = true;\n finished = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n isGlob = token.isGlob = true;\n 
finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n }\n\n if (code === CHAR_ASTERISK) {\n if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_QUESTION_MARK) {\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_LEFT_SQUARE_BRACKET) {\n while (eos() !== true && (next = advance())) {\n if (next === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n isBracket = token.isBracket = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n }\n }\n\n if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {\n negated = token.negated = true;\n start++;\n continue;\n }\n\n if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_LEFT_PARENTHESES) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n\n if (isGlob === true) {\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n }\n\n if (opts.noext === true) {\n isExtglob = false;\n isGlob = false;\n }\n\n let base = str;\n let prefix = '';\n let glob = '';\n\n if (start > 0) {\n prefix = str.slice(0, start);\n str = str.slice(start);\n lastIndex -= start;\n }\n\n if (base && isGlob === true && lastIndex > 0) {\n base = str.slice(0, lastIndex);\n glob = str.slice(lastIndex);\n } else if (isGlob === true) {\n base = '';\n glob = str;\n } else {\n base = str;\n }\n\n if (base && base !== '' && base !== '/' && base !== str) {\n if (isPathSeparator(base.charCodeAt(base.length - 1))) {\n base = base.slice(0, -1);\n }\n }\n\n if (opts.unescape === true) {\n if (glob) glob = utils.removeBackslashes(glob);\n\n if (base && backslashes === true) {\n base = utils.removeBackslashes(base);\n }\n }\n\n const state = {\n prefix,\n input,\n start,\n base,\n glob,\n isBrace,\n isBracket,\n isGlob,\n isExtglob,\n isGlobstar,\n negated\n };\n\n if (opts.tokens === true) {\n state.maxDepth = 0;\n if (!isPathSeparator(code)) {\n tokens.push(token);\n }\n state.tokens = tokens;\n }\n\n if (opts.parts === true || opts.tokens === true) {\n let prevIndex;\n\n for (let idx = 0; idx < slashes.length; idx++) {\n const n = prevIndex ? 
prevIndex + 1 : start;\n const i = slashes[idx];\n const value = input.slice(n, i);\n if (opts.tokens) {\n if (idx === 0 && start !== 0) {\n tokens[idx].isPrefix = true;\n tokens[idx].value = prefix;\n } else {\n tokens[idx].value = value;\n }\n depth(tokens[idx]);\n state.maxDepth += tokens[idx].depth;\n }\n if (idx !== 0 || value !== '') {\n parts.push(value);\n }\n prevIndex = i;\n }\n\n if (prevIndex && prevIndex + 1 < input.length) {\n const value = input.slice(prevIndex + 1);\n parts.push(value);\n\n if (opts.tokens) {\n tokens[tokens.length - 1].value = value;\n depth(tokens[tokens.length - 1]);\n state.maxDepth += tokens[tokens.length - 1].depth;\n }\n }\n\n state.slashes = slashes;\n state.parts = parts;\n }\n\n return state;\n};\n\nmodule.exports = scan;\n","'use strict';\n\nconst path = require('path');\nconst win32 = process.platform === 'win32';\nconst {\n REGEX_BACKSLASH,\n REGEX_REMOVE_BACKSLASH,\n REGEX_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_GLOBAL\n} = require('./constants');\n\nexports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\nexports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);\nexports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);\nexports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\\\$1');\nexports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');\n\nexports.removeBackslashes = str => {\n return str.replace(REGEX_REMOVE_BACKSLASH, match => {\n return match === '\\\\' ? '' : match;\n });\n};\n\nexports.supportsLookbehinds = () => {\n const segs = process.version.slice(1).split('.').map(Number);\n if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {\n return true;\n }\n return false;\n};\n\nexports.isWindows = options => {\n if (options && typeof options.windows === 'boolean') {\n return options.windows;\n }\n return win32 === true || path.sep === '\\\\';\n};\n\nexports.escapeLast = (input, char, lastIdx) => {\n const idx = input.lastIndexOf(char, lastIdx);\n if (idx === -1) return input;\n if (input[idx - 1] === '\\\\') return exports.escapeLast(input, char, idx - 1);\n return `${input.slice(0, idx)}\\\\${input.slice(idx)}`;\n};\n\nexports.removePrefix = (input, state = {}) => {\n let output = input;\n if (output.startsWith('./')) {\n output = output.slice(2);\n state.prefix = './';\n }\n return output;\n};\n\nexports.wrapOutput = (input, state = {}, options = {}) => {\n const prepend = options.contains ? '' : '^';\n const append = options.contains ? 
'' : '$';\n\n let output = `${prepend}(?:${input})${append}`;\n if (state.negated === true) {\n output = `(?:^(?!${output}).*$)`;\n }\n return output;\n};\n","var once = require('once')\nvar eos = require('end-of-stream')\nvar fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes\n\nvar noop = function () {}\nvar ancient = /^v?\\.0/.test(process.version)\n\nvar isFn = function (fn) {\n return typeof fn === 'function'\n}\n\nvar isFS = function (stream) {\n if (!ancient) return false // newer node version do not need to care about fs is a special way\n if (!fs) return false // browser\n return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)\n}\n\nvar isRequest = function (stream) {\n return stream.setHeader && isFn(stream.abort)\n}\n\nvar destroyer = function (stream, reading, writing, callback) {\n callback = once(callback)\n\n var closed = false\n stream.on('close', function () {\n closed = true\n })\n\n eos(stream, {readable: reading, writable: writing}, function (err) {\n if (err) return callback(err)\n closed = true\n callback()\n })\n\n var destroyed = false\n return function (err) {\n if (closed) return\n if (destroyed) return\n destroyed = true\n\n if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks\n if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want\n\n if (isFn(stream.destroy)) return stream.destroy()\n\n callback(err || new Error('stream was destroyed'))\n }\n}\n\nvar call = function (fn) {\n fn()\n}\n\nvar pipe = function (from, to) {\n return from.pipe(to)\n}\n\nvar pump = function () {\n var streams = Array.prototype.slice.call(arguments)\n var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop\n\n if (Array.isArray(streams[0])) streams = streams[0]\n if (streams.length < 2) throw new Error('pump requires two streams per minimum')\n\n var error\n var destroys = streams.map(function (stream, i) {\n var reading = i < streams.length - 1\n var writing = i > 0\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err\n if (err) destroys.forEach(call)\n if (reading) return\n destroys.forEach(call)\n callback(error)\n })\n })\n\n return streams.reduce(pipe)\n}\n\nmodule.exports = pump\n","'use strict';\n\nclass QuickLRU {\n\tconstructor(options = {}) {\n\t\tif (!(options.maxSize && options.maxSize > 0)) {\n\t\t\tthrow new TypeError('`maxSize` must be a number greater than 0');\n\t\t}\n\n\t\tthis.maxSize = options.maxSize;\n\t\tthis.onEviction = options.onEviction;\n\t\tthis.cache = new Map();\n\t\tthis.oldCache = new Map();\n\t\tthis._size = 0;\n\t}\n\n\t_set(key, value) {\n\t\tthis.cache.set(key, value);\n\t\tthis._size++;\n\n\t\tif (this._size >= this.maxSize) {\n\t\t\tthis._size = 0;\n\n\t\t\tif (typeof this.onEviction === 'function') {\n\t\t\t\tfor (const [key, value] of this.oldCache.entries()) {\n\t\t\t\t\tthis.onEviction(key, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.oldCache = this.cache;\n\t\t\tthis.cache = new Map();\n\t\t}\n\t}\n\n\tget(key) {\n\t\tif (this.cache.has(key)) {\n\t\t\treturn this.cache.get(key);\n\t\t}\n\n\t\tif (this.oldCache.has(key)) {\n\t\t\tconst value = this.oldCache.get(key);\n\t\t\tthis.oldCache.delete(key);\n\t\t\tthis._set(key, value);\n\t\t\treturn value;\n\t\t}\n\t}\n\n\tset(key, value) {\n\t\tif (this.cache.has(key)) {\n\t\t\tthis.cache.set(key, value);\n\t\t} else {\n\t\t\tthis._set(key, 
value);\n\t\t}\n\n\t\treturn this;\n\t}\n\n\thas(key) {\n\t\treturn this.cache.has(key) || this.oldCache.has(key);\n\t}\n\n\tpeek(key) {\n\t\tif (this.cache.has(key)) {\n\t\t\treturn this.cache.get(key);\n\t\t}\n\n\t\tif (this.oldCache.has(key)) {\n\t\t\treturn this.oldCache.get(key);\n\t\t}\n\t}\n\n\tdelete(key) {\n\t\tconst deleted = this.cache.delete(key);\n\t\tif (deleted) {\n\t\t\tthis._size--;\n\t\t}\n\n\t\treturn this.oldCache.delete(key) || deleted;\n\t}\n\n\tclear() {\n\t\tthis.cache.clear();\n\t\tthis.oldCache.clear();\n\t\tthis._size = 0;\n\t}\n\n\t* keys() {\n\t\tfor (const [key] of this) {\n\t\t\tyield key;\n\t\t}\n\t}\n\n\t* values() {\n\t\tfor (const [, value] of this) {\n\t\t\tyield value;\n\t\t}\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tfor (const item of this.cache) {\n\t\t\tyield item;\n\t\t}\n\n\t\tfor (const item of this.oldCache) {\n\t\t\tconst [key] = item;\n\t\t\tif (!this.cache.has(key)) {\n\t\t\t\tyield item;\n\t\t\t}\n\t\t}\n\t}\n\n\tget size() {\n\t\tlet oldCacheSize = 0;\n\t\tfor (const key of this.oldCache.keys()) {\n\t\t\tif (!this.cache.has(key)) {\n\t\t\t\toldCacheSize++;\n\t\t\t}\n\t\t}\n\n\t\treturn Math.min(this._size + oldCacheSize, this.maxSize);\n\t}\n}\n\nmodule.exports = QuickLRU;\n","'use strict';\nconst tls = require('tls');\n\nmodule.exports = (options = {}) => new Promise((resolve, reject) => {\n\tconst socket = tls.connect(options, () => {\n\t\tif (options.resolveSocket) {\n\t\t\tsocket.off('error', reject);\n\t\t\tresolve({alpnProtocol: socket.alpnProtocol, socket});\n\t\t} else {\n\t\t\tsocket.destroy();\n\t\t\tresolve({alpnProtocol: socket.alpnProtocol});\n\t\t}\n\t});\n\n\tsocket.on('error', reject);\n});\n","'use strict';\n\nconst Readable = require('stream').Readable;\nconst lowercaseKeys = require('lowercase-keys');\n\nclass Response extends Readable {\n\tconstructor(statusCode, headers, body, url) {\n\t\tif (typeof statusCode !== 'number') {\n\t\t\tthrow new TypeError('Argument `statusCode` should be a number');\n\t\t}\n\t\tif (typeof headers !== 'object') {\n\t\t\tthrow new TypeError('Argument `headers` should be an object');\n\t\t}\n\t\tif (!(body instanceof Buffer)) {\n\t\t\tthrow new TypeError('Argument `body` should be a buffer');\n\t\t}\n\t\tif (typeof url !== 'string') {\n\t\t\tthrow new TypeError('Argument `url` should be a string');\n\t\t}\n\n\t\tsuper();\n\t\tthis.statusCode = statusCode;\n\t\tthis.headers = lowercaseKeys(headers);\n\t\tthis.body = body;\n\t\tthis.url = url;\n\t}\n\n\t_read() {\n\t\tthis.push(this.body);\n\t\tthis.push(null);\n\t}\n}\n\nmodule.exports = Response;\n","'use strict'\n\nfunction reusify (Constructor) {\n var head = new Constructor()\n var tail = head\n\n function get () {\n var current = head\n\n if (current.next) {\n head = current.next\n } else {\n head = new Constructor()\n tail = head\n }\n\n current.next = null\n\n return current\n }\n\n function release (obj) {\n tail.next = obj\n tail = obj\n }\n\n return {\n get: get,\n release: release\n }\n}\n\nmodule.exports = reusify\n","/*! run-parallel. MIT License. 
Feross Aboukhadijeh */\nmodule.exports = runParallel\n\nfunction runParallel (tasks, cb) {\n var results, pending, keys\n var isSync = true\n\n if (Array.isArray(tasks)) {\n results = []\n pending = tasks.length\n } else {\n keys = Object.keys(tasks)\n results = {}\n pending = keys.length\n }\n\n function done (err) {\n function end () {\n if (cb) cb(err, results)\n cb = null\n }\n if (isSync) process.nextTick(end)\n else end()\n }\n\n function each (i, err, result) {\n results[i] = result\n if (--pending === 0 || err) {\n done(err)\n }\n }\n\n if (!pending) {\n // empty\n done(null)\n } else if (keys) {\n // object\n keys.forEach(function (key) {\n tasks[key](function (err, result) { each(key, err, result) })\n })\n } else {\n // array\n tasks.forEach(function (task, i) {\n task(function (err, result) { each(i, err, result) })\n })\n }\n\n isSync = false\n}\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? 
Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = 
Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //