/******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ /***/ 9399: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LocalFileProvider = void 0; const fs = __importStar(__nccwpck_require__(7147)); const fast_glob_1 = __importDefault(__nccwpck_require__(3664)); const git_1 = __nccwpck_require__(9844); const adm_zip_1 = __importDefault(__nccwpck_require__(6761)); const path_1 = __importDefault(__nccwpck_require__(1017)); class LocalFileProvider { constructor(name, pattern) { this.name = name; this.pattern = pattern; } load() { return __awaiter(this, void 0, void 0, function* () { const result = []; const zip = new adm_zip_1.default(); for (const pat of this.pattern) { const paths = yield (0, fast_glob_1.default)(pat, { dot: true }); for (const file of paths) { const dir = path_1.default.dirname(file); zip.addLocalFile(file, dir); const content = yield fs.promises.readFile(file, { encoding: 'utf8' }); result.push({ file, content }); } } return { trxZip: zip, reports: { [this.name]: result } }; }); } listTrackedFiles() { return __awaiter(this, void 0, void 0, function* () { return (0, git_1.listFiles)(); }); } } exports.LocalFileProvider = LocalFileProvider; /***/ }), /***/ 3109: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); const local_file_provider_1 = __nccwpck_require__(9399); const test_results_1 = __nccwpck_require__(2768); const get_annotations_1 = __nccwpck_require__(5867); const get_report_1 = __nccwpck_require__(3737); const dart_json_parser_1 = __nccwpck_require__(4528); const dotnet_trx_parser_1 = __nccwpck_require__(2664); const java_junit_parser_1 = __nccwpck_require__(676); const jest_junit_parser_1 = __nccwpck_require__(1113); const mocha_json_parser_1 = __nccwpck_require__(6043); const path_utils_1 = __nccwpck_require__(4070); const github_utils_1 = __nccwpck_require__(3522); const markdown_utils_1 = __nccwpck_require__(6482); const webhook_1 = __nccwpck_require__(1095); const fs_1 = __importDefault(__nccwpck_require__(7147)); const bent_1 = __importDefault(__nccwpck_require__(3113)); const process_1 = __nccwpck_require__(7282); function main() { return __awaiter(this, void 0, void 0, function* () { try { const testReporter = new TestReporter(); yield testReporter.run(); } catch (error) { if (error instanceof Error) core.setFailed(error); else core.setFailed(JSON.stringify(error)); } }); } class TestReporter { constructor() { this.artifact = core.getInput('artifact', { required: false }); this.name = core.getInput('name', { required: true }); this.path = core.getInput('path', { required: true }); this.pathReplaceBackslashes = core.getInput('path-replace-backslashes', { required: false }) === 'true'; this.reporter = core.getInput('reporter', { required: true }); this.listSuites = core.getInput('list-suites', { required: true }); this.listTests = core.getInput('list-tests', { required: true }); this.maxAnnotations = parseInt(core.getInput('max-annotations', { required: true })); this.failOnError = core.getInput('fail-on-error', { required: true }) === 'true'; this.failOnEmpty = core.getInput('fail-on-empty', { required: true 
}) === 'true'; this.workDirInput = core.getInput('working-directory', { required: false }); this.onlySummary = core.getInput('only-summary', { required: false }) === 'true'; this.token = core.getInput('token', { required: true }); this.slackWebhook = core.getInput('slack-url', { required: false }); this.resultsEndpoint = core.getInput('test-results-endpoint', { required: false }); this.resultsEndpointSecret = core.getInput('test-results-endpoint-secret', { required: false }); this.context = (0, github_utils_1.getCheckRunContext)(); this.octokit = github.getOctokit(this.token); if (this.listSuites !== 'all' && this.listSuites !== 'failed') { core.setFailed(`Input parameter 'list-suites' has invalid value`); return; } if (this.listTests !== 'all' && this.listTests !== 'failed' && this.listTests !== 'none') { core.setFailed(`Input parameter 'list-tests' has invalid value`); return; } if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) { core.setFailed(`Input parameter 'max-annotations' has invalid value`); return; } } run() { var _a, _b; return __awaiter(this, void 0, void 0, function* () { if (this.workDirInput) { core.info(`Changing directory to '${this.workDirInput}'`); process.chdir(this.workDirInput); } core.info(`Check runs will be created with SHA=${this.context.sha}`); // Split path pattern by ',' and optionally convert all backslashes to forward slashes // fast-glob (micromatch) always interprets backslashes as escape characters instead of directory separators const pathsList = this.path.split(','); const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList; const inputProvider = new local_file_provider_1.LocalFileProvider(this.name, pattern); const parseErrors = this.maxAnnotations > 0; const trackedFiles = parseErrors ? yield inputProvider.listTrackedFiles() : []; const workDir = this.artifact ? undefined : (0, path_utils_1.normalizeDirPath)(process.cwd(), true); if (parseErrors) core.info(`Found ${trackedFiles.length} files tracked by GitHub`); const options = { workDir, trackedFiles, parseErrors }; core.info(`Using test report parser '${this.reporter}'`); const parser = this.getParser(this.reporter, options); const results = []; const input = yield inputProvider.load(); if (((_a = this.resultsEndpoint) === null || _a === void 0 ? void 0 : _a.length) > 0) { try { const readStream = input.trxZip.toBuffer(); const version = fs_1.default.existsSync('test/EVA.TestSuite.Core/bin/Release/version.txt') ? fs_1.default.readFileSync('test/EVA.TestSuite.Core/bin/Release/version.txt').toString() : null; const commitID = fs_1.default.existsSync('test/EVA.TestSuite.Core/bin/Release/commit.txt') ? fs_1.default.readFileSync('test/EVA.TestSuite.Core/bin/Release/commit.txt').toString() : null; core.info(`Using EVA version ${version}, commit ${commitID}, branch ${this.context.branch}, current directory: ${(0, process_1.cwd)()}`); const post = (0, bent_1.default)(this.resultsEndpoint, 'POST', {}, 200); yield post(`TestResults?Secret=${this.resultsEndpointSecret}${version ? '&EVAVersion=' + version : ''}${commitID ? 
'&EVACommitID=' + commitID : ''}&EVABranch=${encodeURI(this.context.branch)}`, readStream); core.info(`Uploaded TRX files`); } catch (ex) { core.warning(`Could not upload TRX ZIP file: ${ex}`); } } for (const [reportName, files] of Object.entries(input.reports)) { try { core.startGroup(`Creating test report ${reportName}`); const tr = yield this.createReport(parser, reportName, files); if (tr != null) { results.push(tr); } } finally { core.endGroup(); } } const isFailed = results.some(tr => tr.results.some(r => r.isFailed)); const conclusion = isFailed ? 'failure' : 'success'; const passed = results.reduce((sum, tr) => sum + tr.passed, 0); const failed = results.reduce((sum, tr) => sum + tr.failed, 0); const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); const time = results.reduce((sum, tr) => sum + tr.time, 0); core.setOutput('conclusion', conclusion); core.setOutput('passed', passed); core.setOutput('failed', failed); core.setOutput('skipped', skipped); core.setOutput('time', time); if (results.some(r => r.shouldFail)) { core.setFailed(`Failed tests were found and the results could not be written to GitHub, so failing this step.`); let counter = 0; core.startGroup('Failed tests'); for (const r of results.filter(r => r.shouldFail)) { for (const rr of r.results) { for (const s of rr.failedSuites) { for (const g of s.failedGroups) { for (const t of g.failedTests) { if (++counter > 10) { core.endGroup(); return; } core.info(`${t.name}: ${(_b = t.error) === null || _b === void 0 ? void 0 : _b.message}`); } } } } } core.endGroup(); return; } if (this.failOnError && isFailed) { core.setFailed(`Failed tests were found and the 'fail-on-error' option is set to ${this.failOnError}`); return; } if (results.length === 0 && this.failOnEmpty) { core.setFailed(`No test report files were found`); return; } }); } createReport(parser, name, files) { var _a; return __awaiter(this, void 0, void 0, function* () { if (files.length === 0) { core.warning(`No file matches path ${this.path}`); return null; } core.info(`Processing test results for check run ${name}`); const results = []; const result = new test_results_1.TestRunResultWithUrl(results, null); for (const { file, content } of files) { try { core.info(`Processing test results from ${file}`); const tr = yield parser.parse(file, content); results.push(tr); } catch (error) { core.error(`Processing test results from ${file} failed`); throw error; } } core.info(`Creating check run ${name}`); try { const existingChecks = yield this.octokit.rest.checks.listForRef(Object.assign(Object.assign({ ref: this.context.sha }, github.context.repo), { check_name: name, status: 'queued' })); const check = ((_a = existingChecks === null || existingChecks === void 0 ? void 0 : existingChecks.data) === null || _a === void 0 ? void 0 : _a.total_count) > 0 ?
yield this.octokit.rest.checks.get(Object.assign({ check_run_id: existingChecks.data.check_runs[0].id }, github.context.repo)) : yield this.octokit.rest.checks.create(Object.assign({ head_sha: this.context.sha, name, status: 'in_progress', output: { title: name, summary: '' } }, github.context.repo)); if (check.data.status === 'queued') { yield this.octokit.rest.checks.update(Object.assign({ check_run_id: check.data.id, status: 'in_progress', output: { title: name, summary: '' } }, github.context.repo)); } core.info('Creating report summary'); const { listSuites, listTests, onlySummary } = this; const baseUrl = check.data.html_url || ''; const summary = (0, get_report_1.getReport)(results, { listSuites, listTests, baseUrl, onlySummary }); core.info('Creating annotations'); const annotations = (0, get_annotations_1.getAnnotations)(results, this.maxAnnotations); const isFailed = results.some(tr => tr.result === 'failed'); const conclusion = isFailed ? 'failure' : 'success'; const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success; core.info(`Updating check run conclusion (${conclusion}) and output`); const resp = yield this.octokit.rest.checks.update(Object.assign({ check_run_id: check.data.id, conclusion, status: 'completed', output: { title: `${name} ${icon}`, summary, annotations } }, github.context.repo)); core.info(`Check run create response: ${resp.status}`); core.info(`Check run URL: ${resp.data.url}`); core.info(`Check run HTML: ${resp.data.html_url}`); core.setOutput('url', resp.data.url); core.setOutput('url_html', resp.data.html_url); core.info(`Check run details: ${resp.data.details_url}`); result.checkUrl = resp.data.html_url; if (this.slackWebhook && this.context.branch === 'master') { const webhook = new webhook_1.IncomingWebhook(this.slackWebhook); const passed = results.reduce((sum, tr) => sum + tr.passed, 0); const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); const failed = results.reduce((sum, tr) => sum + tr.failed, 0); const req = { blocks: [ { type: 'section', text: { type: 'mrkdwn', text: `:large_green_circle: ${passed} :large_orange_circle: ${skipped} :red_circle: ${failed} <${resp.data.html_url}|(view)>` } } ] }; results.map((tr, runIndex) => { if (tr.failed === 0) return; const runName = tr.path.slice(0, tr.path.indexOf('/TestResults/')); req.blocks.push({ type: 'section', text: { type: 'mrkdwn', text: `:red_circle: ${tr.failed} in <${resp.data.html_url}#r${runIndex}|${runName}>` } }); }); yield webhook.send(req); } } catch (error) { core.error(`Could not create check to store the results`); } return result; }); } getParser(reporter, options) { switch (reporter) { case 'dart-json': return new dart_json_parser_1.DartJsonParser(options, 'dart'); case 'dotnet-trx': return new dotnet_trx_parser_1.DotnetTrxParser(options); case 'flutter-json': return new dart_json_parser_1.DartJsonParser(options, 'flutter'); case 'java-junit': return new java_junit_parser_1.JavaJunitParser(options); case 'jest-junit': return new jest_junit_parser_1.JestJunitParser(options); case 'mocha-json': return new mocha_json_parser_1.MochaJsonParser(options); default: throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`); } } } main(); /***/ }), /***/ 4528: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DartJsonParser = void 0; const path_utils_1 = __nccwpck_require__(4070); const dart_json_types_1 = __nccwpck_require__(7887); const test_results_1 = __nccwpck_require__(2768); class TestRun { constructor(path, suites, success, time) { this.path = path; this.suites = suites; this.success = success; this.time = time; } } class TestSuite { constructor(suite) { this.suite = suite; this.groups = {}; } } class TestGroup { constructor(group) { this.group = group; this.tests = []; } } class TestCase { constructor(testStart) { this.testStart = testStart; this.print = []; this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1]; } get result() { var _a, _b, _c, _d; if ((_a = this.testDone) === null || _a === void 0 ? void 0 : _a.skipped) { return 'skipped'; } if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') { return 'success'; } if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') { return 'failed'; } return undefined; } get time() { return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0; } } class DartJsonParser { constructor(options, sdk) { this.options = options; this.sdk = sdk; } parse(path, content) { return __awaiter(this, void 0, void 0, function* () { const tr = this.getTestRun(path, content); const result = this.getTestRunResult(tr); return Promise.resolve(result); }); } getTestRun(path, content) { const lines = content.split(/\n\r?/g); const events = lines .map((str, i) => { if (str.trim() === '') { return null; } try { return JSON.parse(str); } catch (e) { const errWithCol = e; const col = errWithCol.columnNumber !== undefined ? 
`:${errWithCol.columnNumber}` : ''; throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\n\n${e}`); } }) .filter(evt => evt != null); let success = false; let totalTime = 0; const suites = {}; const tests = {}; for (const evt of events) { if ((0, dart_json_types_1.isSuiteEvent)(evt)) { suites[evt.suite.id] = new TestSuite(evt.suite); } else if ((0, dart_json_types_1.isGroupEvent)(evt)) { suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group); } else if ((0, dart_json_types_1.isTestStartEvent)(evt) && evt.test.url !== null) { const test = new TestCase(evt); const suite = suites[evt.test.suiteID]; const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]]; group.tests.push(test); tests[evt.test.id] = test; } else if ((0, dart_json_types_1.isTestDoneEvent)(evt) && !evt.hidden && tests[evt.testID]) { tests[evt.testID].testDone = evt; } else if ((0, dart_json_types_1.isErrorEvent)(evt) && tests[evt.testID]) { tests[evt.testID].error = evt; } else if ((0, dart_json_types_1.isMessageEvent)(evt) && tests[evt.testID]) { tests[evt.testID].print.push(evt); } else if ((0, dart_json_types_1.isDoneEvent)(evt)) { success = evt.success; totalTime = evt.time; } } return new TestRun(path, Object.values(suites), success, totalTime); } getTestRunResult(tr) { const suites = tr.suites.map(s => { return new test_results_1.TestSuiteResult(this.getRelativePath(s.suite.path), this.getGroups(s)); }); return new test_results_1.TestRunResult(tr.path, suites, tr.time); } getGroups(suite) { const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0); groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); }); return groups.map(group => { group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? _b : 0); }); const tests = group.tests.map(tc => { const error = this.getError(suite, tc); const testName = group.group.name !== undefined && tc.testStart.test.name.startsWith(group.group.name) ? tc.testStart.test.name.slice(group.group.name.length).trim() : tc.testStart.test.name.trim(); return new test_results_1.TestCaseResult(testName, tc.result, tc.time, error); }); return new test_results_1.TestGroupResult(group.group.name, tests); }); } getError(testSuite, test) { var _a, _b, _c, _d, _e, _f; if (!this.options.parseErrors || !test.error) { return undefined; } const { trackedFiles } = this.options; const stackTrace = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.stackTrace) !== null && _b !== void 0 ? _b : ''; const print = test.print .filter(p => p.messageType === 'print') .map(p => p.message) .join('\n'); const details = [print, stackTrace].filter(str => str !== '').join('\n'); const src = this.exceptionThrowSource(details, trackedFiles); const message = this.getErrorMessage((_d = (_c = test.error) === null || _c === void 0 ? void 0 : _c.error) !== null && _d !== void 0 ? _d : '', print); let path; let line; if (src !== undefined) { path = src.path; line = src.line; } else { const testStartPath = this.getRelativePath(testSuite.suite.path); if (trackedFiles.includes(testStartPath)) { path = testStartPath; line = (_f = (_e = test.testStart.test.root_line) !== null && _e !== void 0 ? _e : test.testStart.test.line) !== null && _f !== void 0 ? 
_f : undefined; } } return { path, line, message, details }; } getErrorMessage(message, print) { if (this.sdk === 'flutter') { const uselessMessageRe = /^Test failed\. See exception logs above\.\nThe test description was:/m; const flutterPrintRe = /^══╡ EXCEPTION CAUGHT BY FLUTTER TEST FRAMEWORK ╞═+\s+(.*)\s+When the exception was thrown, this was the stack:/ms; if (uselessMessageRe.test(message)) { const match = print.match(flutterPrintRe); if (match !== null) { return match[1]; } } } return message || print; } exceptionThrowSource(ex, trackedFiles) { const lines = ex.split(/\r?\n/g); // regexp to extract file path and line number from stack trace const dartRe = /^(?!package:)(.*)\s+(\d+):\d+\s+/; const flutterRe = /^#\d+\s+.*\((?!package:)(.*):(\d+):\d+\)$/; const re = this.sdk === 'dart' ? dartRe : flutterRe; for (const str of lines) { const match = str.match(re); if (match !== null) { const [_, pathStr, lineStr] = match; const path = (0, path_utils_1.normalizeFilePath)(this.getRelativePath(pathStr)); if (trackedFiles.includes(path)) { const line = parseInt(lineStr); return { path, line }; } } } } getRelativePath(path) { const prefix = 'file://'; if (path.startsWith(prefix)) { path = path.substr(prefix.length); } path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); } return path; } getWorkDir(path) { var _a, _b; return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DartJsonParser = DartJsonParser; /***/ }), /***/ 7887: /***/ ((__unused_webpack_module, exports) => { "use strict"; /// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isMessageEvent = exports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0; function isSuiteEvent(event) { return event.type === 'suite'; } exports.isSuiteEvent = isSuiteEvent; function isGroupEvent(event) { return event.type === 'group'; } exports.isGroupEvent = isGroupEvent; function isTestStartEvent(event) { return event.type === 'testStart'; } exports.isTestStartEvent = isTestStartEvent; function isTestDoneEvent(event) { return event.type === 'testDone'; } exports.isTestDoneEvent = isTestDoneEvent; function isErrorEvent(event) { return event.type === 'error'; } exports.isErrorEvent = isErrorEvent; function isDoneEvent(event) { return event.type === 'done'; } exports.isDoneEvent = isDoneEvent; function isMessageEvent(event) { return event.type === 'print'; } exports.isMessageEvent = isMessageEvent; /***/ }), /***/ 2664: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DotnetTrxParser = void 0; const xml2js_1 = __nccwpck_require__(6189); const path_utils_1 = __nccwpck_require__(4070); const parse_utils_1 = __nccwpck_require__(7811); const test_results_1 = __nccwpck_require__(2768); class TestClass { constructor(name) { this.name = name; this.tests = []; } } class Test { constructor(name, outcome, duration, error) { this.name = name; this.outcome = outcome; this.duration = duration; this.error = error; } get result() { switch (this.outcome) { case 'Passed': return 'success'; case 'NotExecuted': return 'skipped'; case 'Failed': return 'failed'; } } } class DotnetTrxParser { constructor(options) { this.options = options; } parse(path, content) { return __awaiter(this, void 0, void 0, function* () { const trx = yield this.getTrxReport(path, content); const tc = this.getTestClasses(trx); const tr = this.getTestRunResult(path, trx, tc); tr.sort(true); return tr; }); } getTrxReport(path, content) { return __awaiter(this, void 0, void 0, function* () { try { return (yield (0, xml2js_1.parseStringPromise)(content)); } catch (e) { throw new Error(`Invalid XML at ${path}\n\n${e}`); } }); } getTestClasses(trx) { if (trx.TestRun.TestDefinitions === undefined || trx.TestRun.Results === undefined) { return []; } const unitTests = {}; for (const td of trx.TestRun.TestDefinitions) { for (const ut of td.UnitTest) { unitTests[ut.$.id] = ut; } } const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(result => ({ result, test: unitTests[result.$.testId] })); const testClasses = {}; for (const r of unitTestsResults) { const className = r.test.TestMethod[0].$.className; let tc = testClasses[className]; if (tc === undefined) { tc = new TestClass(className); testClasses[tc.name] = tc; } const error = this.getErrorInfo(r.result); const durationAttr = r.result.$.duration; const duration = durationAttr ? (0, parse_utils_1.parseNetDuration)(durationAttr) : 0; const resultTestName = r.result.$.testName; const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.' ? resultTestName.substr(className.length + 1) : resultTestName; const test = new Test(testName, r.result.$.outcome, duration, error); tc.tests.push(test); } const result = Object.values(testClasses); return result; } getTestRunResult(path, trx, testClasses) { const times = trx.TestRun.Times[0].$; const totalTime = (0, parse_utils_1.parseIsoDate)(times.finish).getTime() - (0, parse_utils_1.parseIsoDate)(times.start).getTime(); const suites = testClasses.map(testClass => { const tests = testClass.tests.map(test => { const error = this.getError(test); return new test_results_1.TestCaseResult(test.name, test.result, test.duration, error); }); const group = new test_results_1.TestGroupResult(null, tests); return new test_results_1.TestSuiteResult(testClass.name, [group]); }); return new test_results_1.TestRunResult(path, suites, totalTime); } getErrorInfo(testResult) { var _a; if (testResult.$.outcome !== 'Failed') { return undefined; } const output = testResult.Output; const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? 
output[0].ErrorInfo[0] : undefined; return error; } getError(test) { if (!this.options.parseErrors || !test.error) { return undefined; } const error = test.error; if (!Array.isArray(error.Message) || error.Message.length === 0 || !Array.isArray(error.StackTrace) || error.StackTrace.length === 0) { return undefined; } const message = test.error.Message[0]; const stackTrace = test.error.StackTrace[0]; let path; let line; const src = this.exceptionThrowSource(stackTrace); if (src) { path = src.path; line = src.line; } return { path, line, message, details: `${message}\n${stackTrace}` }; } exceptionThrowSource(stackTrace) { const lines = stackTrace.split(/\r*\n/); const re = / in (.+):line (\d+)$/; const { trackedFiles } = this.options; for (const str of lines) { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); const workDir = this.getWorkDir(filePath); if (workDir) { const file = filePath.substr(workDir.length); if (trackedFiles.includes(file)) { const line = parseInt(lineStr); return { path: file, line }; } } } } } getWorkDir(path) { var _a, _b; return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.DotnetTrxParser = DotnetTrxParser; /***/ }), /***/ 676: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JavaJunitParser = void 0; const path = __importStar(__nccwpck_require__(1017)); const xml2js_1 = __nccwpck_require__(6189); const java_stack_trace_element_parser_1 = __nccwpck_require__(5775); const path_utils_1 = __nccwpck_require__(4070); const test_results_1 = __nccwpck_require__(2768); class JavaJunitParser { constructor(options) { var _a; this.options = options; // Map for efficient lookup of all paths with given file name this.trackedFiles = {}; for (const filePath of options.trackedFiles) { const fileName = path.basename(filePath); const files = (_a = this.trackedFiles[fileName]) !== null && _a !== void 0 ? _a : (this.trackedFiles[fileName] = []); files.push((0, path_utils_1.normalizeFilePath)(filePath)); } } parse(filePath, content) { return __awaiter(this, void 0, void 0, function* () { const reportOrSuite = yield this.getJunitReport(filePath, content); const isReport = reportOrSuite.testsuites !== undefined; // XML might contain: // - multiple suites under <testsuites> root node // - single <testsuite> as root node let ju; if (isReport) { ju = reportOrSuite; } else { // Make it behave the same way as if suite was inside <testsuites> root node const suite = reportOrSuite.testsuite; ju = { testsuites: { $: { time: suite.$.time }, testsuite: [suite] } }; } return this.getTestRunResult(filePath, ju); }); } getJunitReport(filePath, content) { return __awaiter(this, void 0, void 0, function* () { try { return yield (0, xml2js_1.parseStringPromise)(content); } catch (e) { throw new Error(`Invalid XML at ${filePath}\n\n${e}`); } }); } getTestRunResult(filePath, junit) { var _a; const suites = junit.testsuites.testsuite === undefined ? [] : junit.testsuites.testsuite.map(ts => { const name = ts.$.name.trim(); const time = parseFloat(ts.$.time) * 1000; const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time); return sr; }); const seconds = parseFloat((_a = junit.testsuites.$) === null || _a === void 0 ? void 0 : _a.time); const time = isNaN(seconds) ? undefined : seconds * 1000; return new test_results_1.TestRunResult(filePath, suites, time); } getGroups(suite) { if (suite.testcase === undefined) { return []; } const groups = []; for (const tc of suite.testcase) { // Normally classname is same as suite name - both refer to same Java class // Therefore it doesn't make sense to process it as a group // and tests will be added to default group with empty name const className = tc.$.classname === suite.$.name ? '' : tc.$.classname; let grp = groups.find(g => g.name === className); if (grp === undefined) { grp = { name: className, tests: [] }; groups.push(grp); } grp.tests.push(tc); } return groups.map(grp => { const tests = grp.tests.map(tc => { const name = tc.$.name.trim(); const result = this.getTestCaseResult(tc); const time = parseFloat(tc.$.time) * 1000; const error = this.getTestCaseError(tc); return new test_results_1.TestCaseResult(name, result, time, error); }); return new test_results_1.TestGroupResult(grp.name, tests); }); } getTestCaseResult(test) { if (test.failure || test.error) return 'failed'; if (test.skipped) return 'skipped'; return 'success'; } getTestCaseError(tc) { var _a; if (!this.options.parseErrors) { return undefined; } // We process <failure> and <error> the same way const failures = (_a = tc.failure) !== null && _a !== void 0 ?
_a : tc.error; if (!failures) { return undefined; } const failure = failures[0]; const details = typeof failure === 'object' ? failure._ : failure; let filePath; let line; if (details != null) { const src = this.exceptionThrowSource(details); if (src) { filePath = src.filePath; line = src.line; } } return { path: filePath, line, details, message: typeof failure === 'object' ? failure.message : undefined }; } exceptionThrowSource(stackTrace) { const lines = stackTrace.split(/\r?\n/); for (const str of lines) { const stackTraceElement = (0, java_stack_trace_element_parser_1.parseStackTraceElement)(str); if (stackTraceElement) { const { tracePath, fileName, lineStr } = stackTraceElement; const filePath = this.getFilePath(tracePath, fileName); if (filePath !== undefined) { const line = parseInt(lineStr); return { filePath, line }; } } } } // Stacktrace in Java doesn't contain full paths to source file. // There are only package, file name and line. // Assuming folder structure matches package name (as it should in Java), // we can try to match tracked file. getFilePath(tracePath, fileName) { // Check if there is any tracked file with given name const files = this.trackedFiles[fileName]; if (files === undefined) { return undefined; } // Remove class name and method name from trace. // Take parts until first item with capital letter - package names are lowercase while class name is CamelCase. const packageParts = tracePath.split(/\./g); const packageIndex = packageParts.findIndex(part => part[0] <= 'Z'); if (packageIndex !== -1) { packageParts.splice(packageIndex, packageParts.length - packageIndex); } if (packageParts.length === 0) { return undefined; } // Get right file // - file name matches // - parent folders structure must reflect the package name for (const filePath of files) { const dirs = path.dirname(filePath).split(/\//g); if (packageParts.length > dirs.length) { continue; } // get only N parent folders, where N = length of package name parts if (dirs.length > packageParts.length) { dirs.splice(0, dirs.length - packageParts.length); } // check if parent folder structure matches package name const isMatch = packageParts.every((part, i) => part === dirs[i]); if (isMatch) { return filePath; } } return undefined; } } exports.JavaJunitParser = JavaJunitParser; /***/ }), /***/ 5775: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.parseStackTraceElement = void 0; // classloader and module name are optional: // at <classloader>/<module name and version>/<fully qualified method>(<file name>:<line number>) // https://github.com/eclipse-openj9/openj9/issues/11452#issuecomment-754946992 const re = /^\s*at (\S+\/\S*\/)?(.*)\((.*):(\d+)\)$/; function parseStackTraceElement(stackTraceLine) { const match = stackTraceLine.match(re); if (match !== null) { const [_, maybeClassLoaderAndModuleNameAndVersion, tracePath, fileName, lineStr] = match; const { classLoader, moduleNameAndVersion } = parseClassLoaderAndModule(maybeClassLoaderAndModuleNameAndVersion); return { classLoader, moduleNameAndVersion, tracePath, fileName, lineStr }; } return undefined; } exports.parseStackTraceElement = parseStackTraceElement; function parseClassLoaderAndModule(maybeClassLoaderAndModuleNameAndVersion) { if (maybeClassLoaderAndModuleNameAndVersion) { const res = maybeClassLoaderAndModuleNameAndVersion.split('/'); const classLoader = res[0]; let moduleNameAndVersion = res[1]; if (moduleNameAndVersion === '') { moduleNameAndVersion = undefined; } return { classLoader, moduleNameAndVersion }; } return { classLoader:
undefined, moduleNameAndVersion: undefined }; } /***/ }), /***/ 1113: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JestJunitParser = void 0; const xml2js_1 = __nccwpck_require__(6189); const node_utils_1 = __nccwpck_require__(5824); const path_utils_1 = __nccwpck_require__(4070); const test_results_1 = __nccwpck_require__(2768); class JestJunitParser { constructor(options) { this.options = options; } parse(path, content) { return __awaiter(this, void 0, void 0, function* () { const ju = yield this.getJunitReport(path, content); return this.getTestRunResult(path, ju); }); } getJunitReport(path, content) { return __awaiter(this, void 0, void 0, function* () { try { return (yield (0, xml2js_1.parseStringPromise)(content)); } catch (e) { throw new Error(`Invalid XML at ${path}\n\n${e}`); } }); } getTestRunResult(path, junit) { const suites = junit.testsuites.testsuite === undefined ? [] : junit.testsuites.testsuite.map(ts => { const name = this.escapeCharacters(ts.$.name.trim()); const time = parseFloat(ts.$.time) * 1000; const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time); return sr; }); const time = parseFloat(junit.testsuites.$.time) * 1000; return new test_results_1.TestRunResult(path, suites, time); } getGroups(suite) { if (!suite.testcase) { return []; } const groups = []; for (const tc of suite.testcase) { let grp = groups.find(g => g.describe === tc.$.classname); if (grp === undefined) { grp = { describe: tc.$.classname, tests: [] }; groups.push(grp); } grp.tests.push(tc); } return groups.map(grp => { const tests = grp.tests.map(tc => { const name = tc.$.name.trim(); const result = this.getTestCaseResult(tc); const time = parseFloat(tc.$.time) * 1000; const error = this.getTestCaseError(tc); return new test_results_1.TestCaseResult(name, result, time, error); }); return new test_results_1.TestGroupResult(grp.describe, tests); }); } getTestCaseResult(test) { if (test.failure) return 'failed'; if (test.skipped) return 'skipped'; return 'success'; } getTestCaseError(tc) { if (!this.options.parseErrors || !tc.failure) { return undefined; } const details = tc.failure[0]; let path; let line; const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; } return { path, line, details }; } getRelativePath(path) { path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); } return path; } getWorkDir(path) { var _a, _b; return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? 
_b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } escapeCharacters(s) { return s.replace(/([<>])/g, '\\$1'); } } exports.JestJunitParser = JestJunitParser; /***/ }), /***/ 6043: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MochaJsonParser = void 0; const test_results_1 = __nccwpck_require__(2768); const node_utils_1 = __nccwpck_require__(5824); const path_utils_1 = __nccwpck_require__(4070); class MochaJsonParser { constructor(options) { this.options = options; } parse(path, content) { return __awaiter(this, void 0, void 0, function* () { const mocha = this.getMochaJson(path, content); const result = this.getTestRunResult(path, mocha); result.sort(true); return Promise.resolve(result); }); } getMochaJson(path, content) { try { return JSON.parse(content); } catch (e) { throw new Error(`Invalid JSON at ${path}\n\n${e}`); } } getTestRunResult(resultsPath, mocha) { const suitesMap = {}; const getSuite = (test) => { var _a; const path = this.getRelativePath(test.file); return (_a = suitesMap[path]) !== null && _a !== void 0 ? _a : (suitesMap[path] = new test_results_1.TestSuiteResult(path, [])); }; for (const test of mocha.passes) { const suite = getSuite(test); this.processTest(suite, test, 'success'); } for (const test of mocha.failures) { const suite = getSuite(test); this.processTest(suite, test, 'failed'); } for (const test of mocha.pending) { const suite = getSuite(test); this.processTest(suite, test, 'skipped'); } const suites = Object.values(suitesMap); return new test_results_1.TestRunResult(resultsPath, suites, mocha.stats.duration); } processTest(suite, test, result) { var _a; const groupName = test.fullTitle !== test.title ? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd() : null; let group = suite.groups.find(grp => grp.name === groupName); if (group === undefined) { group = new test_results_1.TestGroupResult(groupName, []); suite.groups.push(group); } const error = this.getTestCaseError(test); const testCase = new test_results_1.TestCaseResult(test.title, result, (_a = test.duration) !== null && _a !== void 0 ? 
_a : 0, error); group.tests.push(testCase); } getTestCaseError(test) { const details = test.err.stack; const message = test.err.message; if (details === undefined) { return undefined; } let path; let line; const src = (0, node_utils_1.getExceptionSource)(details, this.options.trackedFiles, file => this.getRelativePath(file)); if (src) { path = src.path; line = src.line; } return { path, line, message, details }; } getRelativePath(path) { path = (0, path_utils_1.normalizeFilePath)(path); const workDir = this.getWorkDir(path); if (workDir !== undefined && path.startsWith(workDir)) { path = path.substr(workDir.length); } return path; } getWorkDir(path) { var _a, _b; return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles))); } } exports.MochaJsonParser = MochaJsonParser; /***/ }), /***/ 5867: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getAnnotations = void 0; const markdown_utils_1 = __nccwpck_require__(6482); const parse_utils_1 = __nccwpck_require__(7811); function getAnnotations(results, maxCount) { var _a, _b, _c, _d; if (maxCount === 0) { return []; } // Collect errors from TestRunResults // Merge duplicates if there are more test results files processed const errors = []; const mergeDup = results.length > 1; for (const tr of results) { for (const ts of tr.suites) { for (const tg of ts.groups) { for (const tc of tg.tests) { const err = tc.error; if (err === undefined) { continue; } const path = (_a = err.path) !== null && _a !== void 0 ? _a : tr.path; const line = (_b = err.line) !== null && _b !== void 0 ? _b : 0; if (mergeDup) { const dup = errors.find(e => path === e.path && line === e.line && err.details === e.details); if (dup !== undefined) { dup.testRunPaths.push(tr.path); continue; } } errors.push({ testRunPaths: [tr.path], suiteName: ts.name, testName: tg.name ? `${tg.name} ► ${tc.name}` : tc.name, details: err.details, message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : (0, parse_utils_1.getFirstNonEmptyLine)(err.details)) !== null && _d !== void 0 ? _d : 'Test failed', path, line }); } } } } // Limit number of created annotations errors.splice(maxCount + 1); const annotations = errors.map(e => { const message = [ 'Failed test found in:', e.testRunPaths.map(p => ` ${p}`).join('\n'), 'Error:', ident((0, markdown_utils_1.fixEol)(e.message), ' ') ].join('\n'); return enforceCheckRunLimits({ path: e.path, start_line: e.line, end_line: e.line, annotation_level: 'failure', title: `${e.suiteName} ► ${e.testName}`, raw_details: (0, markdown_utils_1.fixEol)(e.details), message }); }); return annotations; } exports.getAnnotations = getAnnotations; function enforceCheckRunLimits(err) { err.title = (0, markdown_utils_1.ellipsis)(err.title || '', 255); err.message = (0, markdown_utils_1.ellipsis)(err.message, 65535); if (err.raw_details) { err.raw_details = (0, markdown_utils_1.ellipsis)(err.raw_details, 65535); } return err; } function ident(text, prefix) { return text .split(/\n/g) .map(line => prefix + line) .join('\n'); } /***/ }), /***/ 3737: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getReport = void 0; const core = __importStar(__nccwpck_require__(2186)); const node_utils_1 = __nccwpck_require__(5824); const markdown_utils_1 = __nccwpck_require__(6482); const parse_utils_1 = __nccwpck_require__(7811); const slugger_1 = __nccwpck_require__(3328); const MAX_REPORT_LENGTH = 65535; const defaultOptions = { listSuites: 'all', listTests: 'all', baseUrl: '', onlySummary: false }; function getReport(results, options = defaultOptions) { core.info('Generating check run summary'); applySort(results); const opts = Object.assign({}, options); let lines = renderReport(results, opts); let report = lines.join('\n'); if (getByteLength(report) <= MAX_REPORT_LENGTH) { return report; } if (opts.listTests === 'all') { core.info("Test report summary is too big - setting 'listTests' to 'failed'"); opts.listTests = 'failed'; lines = renderReport(results, opts); report = lines.join('\n'); if (getByteLength(report) <= MAX_REPORT_LENGTH) { return report; } } if (opts.listSuites === 'all') { core.info("Test report summary is too big - setting 'listSuites' to 'failed'"); opts.listSuites = 'failed'; lines = renderReport(results, opts); report = lines.join('\n'); if (getByteLength(report) <= MAX_REPORT_LENGTH) { return report; } } core.warning(`Test report summary exceeded limit of ${MAX_REPORT_LENGTH} bytes and will be trimmed`); return trimReport(lines); } exports.getReport = getReport; function trimReport(lines) { const closingBlock = '```'; const errorMsg = `**Report exceeded GitHub limit of ${MAX_REPORT_LENGTH} bytes and has been trimmed**`; const maxErrorMsgLength = closingBlock.length + errorMsg.length + 2; const maxReportLength = MAX_REPORT_LENGTH - maxErrorMsgLength; let reportLength = 0; let codeBlock = false; let endLineIndex = 0; for (endLineIndex = 0; endLineIndex < lines.length; endLineIndex++) { const line = lines[endLineIndex]; const lineLength = getByteLength(line); reportLength += lineLength + 1; if (reportLength > maxReportLength) { break; } if (line === '```') { codeBlock = !codeBlock; } } const reportLines = lines.slice(0, endLineIndex); if (codeBlock) { reportLines.push('```'); } reportLines.push(errorMsg); return reportLines.join('\n'); } function applySort(results) { results.sort((a, b) => a.path.localeCompare(b.path, node_utils_1.DEFAULT_LOCALE)); for (const res of results) { res.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } function getByteLength(text) { return Buffer.byteLength(text, 'utf8'); } function renderReport(results, options) { const sections = []; const badge = 
getReportBadge(results); sections.push(badge); const runs = getTestRunsReport(results, options); sections.push(...runs); return sections; } function getReportBadge(results) { const passed = results.reduce((sum, tr) => sum + tr.passed, 0); const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0); const failed = results.reduce((sum, tr) => sum + tr.failed, 0); return getBadge(passed, failed, skipped); } function getBadge(passed, failed, skipped) { const text = []; if (passed > 0) { text.push(`${passed} passed`); } if (failed > 0) { text.push(`${failed} failed`); } if (skipped > 0) { text.push(`${skipped} skipped`); } const message = text.length > 0 ? text.join(', ') : 'none'; let color = 'success'; if (failed > 0) { color = 'critical'; } else if (passed === 0 && failed === 0) { color = 'yellow'; } const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully'; const uri = encodeURIComponent(`tests-${message}-${color}`); return `![${hint}](https://img.shields.io/badge/${uri})`; } function getTestRunsReport(testRuns, options) { const sections = []; if (testRuns.length > 1 || options.onlySummary) { const tableData = testRuns.map((tr, runIndex) => { const time = (0, markdown_utils_1.formatTime)(tr.time); const folder = tr.path.indexOf('/TestResults/'); const name = folder > 0 ? tr.path.slice(0, folder) : tr.path; const addr = options.baseUrl + makeRunSlug(runIndex).link; const nameLink = (0, markdown_utils_1.link)(name, addr); const passed = tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : ''; const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = tr.skipped > 0 ? `${tr.skipped}${markdown_utils_1.Icon.skip}` : ''; return [nameLink, passed, failed, skipped, time]; }); const resultsTable = (0, markdown_utils_1.table)(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData); sections.push(resultsTable); } if (options.onlySummary === false) { const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat(); sections.push(...suitesReports); } return sections; } function getSuitesReport(tr, runIndex, options) { const sections = []; const trSlug = makeRunSlug(runIndex); const folder = tr.path.indexOf('/TestResults/'); const name = folder > 0 ? tr.path.slice(0, folder) : tr.path; const nameLink = `${name}`; const icon = getResultIcon(tr.result); sections.push(`## ${icon}\xa0${nameLink}`); const time = (0, markdown_utils_1.formatTime)(tr.time); const headingLine2 = tr.tests > 0 ? `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.` : 'No tests found'; sections.push(headingLine2); const suites = options.listSuites === 'failed' ? tr.failedSuites : tr.suites; if (suites.length > 0) { const suitesTable = (0, markdown_utils_1.table)(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => { const tsTime = (0, markdown_utils_1.formatTime)(s.time); const tsName = s.name.startsWith(name) ? 
s.name.slice(name.length + 1) : s.name; const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed'); const tsAddr = options.baseUrl + makeSuiteSlug(runIndex, suiteIndex).link; const tsNameLink = skipLink ? tsName : (0, markdown_utils_1.link)(tsName, tsAddr); const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : ''; const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : ''; const skipped = s.skipped > 0 ? `${s.skipped}${markdown_utils_1.Icon.skip}` : ''; return [tsNameLink, passed, failed, skipped, tsTime]; })); sections.push(suitesTable); } if (options.listTests !== 'none') { const tests = suites.map((ts, suiteIndex) => getTestsReport(ts, runIndex, suiteIndex, options)).flat(); if (tests.length > 1) { sections.push(...tests); } } return sections; } function getTestsReport(ts, runIndex, suiteIndex, options) { var _a, _b, _c; if (options.listTests === 'failed' && ts.result !== 'failed') { return []; } const groups = ts.groups; if (groups.length === 0) { return []; } const sections = []; const tsName = ts.name; const tsSlug = makeSuiteSlug(runIndex, suiteIndex); const tsNameLink = `${tsName}`; const icon = getResultIcon(ts.result); sections.push(`### ${icon}\xa0${tsNameLink}`); sections.push('```'); for (const grp of groups) { if (grp.name) { sections.push(grp.name); } const space = grp.name ? ' ' : ''; for (const tc of grp.tests) { const result = getResultIcon(tc.result); sections.push(`${space}${result} ${tc.name}`); if (tc.error) { const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = (0, parse_utils_1.getFirstNonEmptyLine)(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? void 0 : _c.split(/\r?\n/g).map(l => '\t' + l); if (lines) { sections.push(...lines); } } } } sections.push('```'); return sections; } function makeRunSlug(runIndex) { // use prefix to avoid slug conflicts after escaping the paths return (0, slugger_1.slug)(`r${runIndex}`); } function makeSuiteSlug(runIndex, suiteIndex) { // use prefix to avoid slug conflicts after escaping the paths return (0, slugger_1.slug)(`r${runIndex}s${suiteIndex}`); } function getResultIcon(result) { switch (result) { case 'success': return markdown_utils_1.Icon.success; case 'skipped': return markdown_utils_1.Icon.skip; case 'failed': return markdown_utils_1.Icon.fail; default: return ''; } } /***/ }), /***/ 2768: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = exports.TestRunResultWithUrl = void 0; const node_utils_1 = __nccwpck_require__(5824); class TestRunResultWithUrl { constructor(results, checkUrl) { this.results = results; this.checkUrl = checkUrl; } get hasCheck() { return !!this.checkUrl; } get shouldFail() { return !this.hasCheck && this.results.some(r => r.isFailed); } get passed() { return this.results.reduce((sum, g) => sum + g.passed, 0); } get failed() { return this.results.reduce((sum, g) => sum + g.failed, 0); } get skipped() { return this.results.reduce((sum, g) => sum + g.skipped, 0); } get time() { return this.results.reduce((sum, g) => sum + g.time, 0); } } exports.TestRunResultWithUrl = TestRunResultWithUrl; class TestRunResult { constructor(path, suites, totalTime) { this.path = path; this.suites = suites; this.totalTime = totalTime; } get tests() { return 
this.suites.reduce((sum, g) => sum + g.tests, 0); } get passed() { return this.suites.reduce((sum, g) => sum + g.passed, 0); } get failed() { return this.suites.reduce((sum, g) => sum + g.failed, 0); } get skipped() { return this.suites.reduce((sum, g) => sum + g.skipped, 0); } get time() { var _a; return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0); } get isFailed() { return this.suites.some(t => t.result === 'failed'); } get result() { return this.isFailed ? 'failed' : 'success'; } get failedSuites() { return this.suites.filter(s => s.result === 'failed'); } sort(deep) { this.suites.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); if (deep) { for (const suite of this.suites) { suite.sort(deep); } } } } exports.TestRunResult = TestRunResult; class TestSuiteResult { constructor(name, groups, totalTime) { this.name = name; this.groups = groups; this.totalTime = totalTime; } get tests() { return this.groups.reduce((sum, g) => sum + g.tests.length, 0); } get passed() { return this.groups.reduce((sum, g) => sum + g.passed, 0); } get failed() { return this.groups.reduce((sum, g) => sum + g.failed, 0); } get skipped() { return this.groups.reduce((sum, g) => sum + g.skipped, 0); } get time() { var _a; return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0); } get result() { return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success'; } get failedGroups() { return this.groups.filter(grp => grp.result === 'failed'); } sort(deep) { this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : '', node_utils_1.DEFAULT_LOCALE); }); if (deep) { for (const grp of this.groups) { grp.sort(); } } } } exports.TestSuiteResult = TestSuiteResult; class TestGroupResult { constructor(name, tests) { this.name = name; this.tests = tests; } get passed() { return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0); } get failed() { return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0); } get skipped() { return this.tests.reduce((sum, t) => (t.result === 'skipped' ? sum + 1 : sum), 0); } get time() { return this.tests.reduce((sum, t) => sum + t.time, 0); } get result() { return this.tests.some(t => t.result === 'failed') ? 'failed' : 'success'; } get failedTests() { return this.tests.filter(tc => tc.result === 'failed'); } sort() { this.tests.sort((a, b) => a.name.localeCompare(b.name, node_utils_1.DEFAULT_LOCALE)); } } exports.TestGroupResult = TestGroupResult; class TestCaseResult { constructor(name, result, time, error) { this.name = name; this.result = result; this.time = time; this.error = error; } } exports.TestCaseResult = TestCaseResult; /***/ }), /***/ 9844: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = void 0; const core = __importStar(__nccwpck_require__(2186)); const exec_1 = __nccwpck_require__(1514); function listFiles() { return __awaiter(this, void 0, void 0, function* () { core.startGroup('Listing all files tracked by git'); let output = ''; try { output = (yield (0, exec_1.getExecOutput)('git', ['ls-files', '-z'])).stdout; } finally { fixStdOutNullTermination(); core.endGroup(); } return output.split('\u0000').filter(s => s.length > 0); }); } exports.listFiles = listFiles; function fixStdOutNullTermination() { // Previous command uses NULL as delimiters and output is printed to stdout. // We have to make sure next thing written to stdout will start on new line. // Otherwise things like ::set-output wouldn't work. core.info(''); } /***/ }), /***/ 3522: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0; const fs_1 = __nccwpck_require__(7147); const core = __importStar(__nccwpck_require__(2186)); const github = __importStar(__nccwpck_require__(5438)); const stream = __importStar(__nccwpck_require__(2781)); const util_1 = __nccwpck_require__(3837); const got_1 = __importDefault(__nccwpck_require__(643)); const asyncStream = (0, util_1.promisify)(stream.pipeline); function getCheckRunContext() { let branch = github.context.ref; if (branch.startsWith('refs/heads')) branch = branch.slice(11); core.info('Branch: ' + branch); if (github.context.eventName === 'workflow_run') { core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow'); const event = github.context.payload; if (!event.workflow_run) { throw new Error("Event of type 'workflow_run' is missing 'workflow_run' field"); } return { sha: event.workflow_run.head_commit.id, runId: event.workflow_run.id, branch }; } const runId = github.context.runId; if (github.context.payload.pull_request) { core.info(`Action was triggered by ${github.context.eventName}: using SHA from head of source branch`); const pr = github.context.payload.pull_request; return { sha: pr.head.sha, runId, branch }; } return { sha: github.context.sha, runId, branch }; } exports.getCheckRunContext = getCheckRunContext; function downloadArtifact(octokit, artifactId, fileName, token) { return __awaiter(this, void 0, void 0, function* () { core.startGroup(`Downloading artifact ${fileName}`); try { core.info(`Artifact ID: ${artifactId}`); const req = octokit.rest.actions.downloadArtifact.endpoint(Object.assign(Object.assign({}, github.context.repo), { artifact_id: artifactId, archive_format: 'zip' })); const headers = { Authorization: `Bearer ${token}` }; const resp = yield (0, got_1.default)(req.url, { headers, followRedirect: false }); core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`); if (resp.statusCode !== 302) { throw new Error('Fetch artifact URL failed: received unexpected status code'); } const url = resp.headers.location; if (url === undefined) { const receivedHeaders = Object.keys(resp.headers); core.info(`Received headers: ${receivedHeaders.join(', ')}`); throw new Error('Location header was not found in API response'); } if (typeof url !== 'string') { throw new Error(`Location header has unexpected value: ${url}`); } const downloadStream = got_1.default.stream(url, { headers }); const fileWriterStream = (0, fs_1.createWriteStream)(fileName); core.info(`Downloading ${url}`); downloadStream.on('downloadProgress', ({ transferred }) => { core.info(`Progress: ${transferred} B`); }); yield asyncStream(downloadStream, fileWriterStream); } finally { core.endGroup(); } }); } exports.downloadArtifact = downloadArtifact; function listFiles(octokit, sha) { return __awaiter(this, void 0, void 0, function* () { core.startGroup('Fetching list of tracked files from GitHub'); try { const commit = yield octokit.rest.git.getCommit(Object.assign({ commit_sha: sha }, github.context.repo)); const files = yield listGitTree(octokit, commit.data.tree.sha, ''); return files; } finally { core.endGroup(); 
} }); } exports.listFiles = listFiles; function listGitTree(octokit, sha, path) { return __awaiter(this, void 0, void 0, function* () { const pathLog = path ? ` at ${path}` : ''; core.info(`Fetching tree ${sha}${pathLog}`); let truncated = false; let tree = yield octokit.rest.git.getTree(Object.assign({ recursive: 'true', tree_sha: sha }, github.context.repo)); if (tree.data.truncated) { truncated = true; tree = yield octokit.rest.git.getTree(Object.assign({ tree_sha: sha }, github.context.repo)); } const result = []; for (const tr of tree.data.tree) { const file = `${path}${tr.path}`; if (tr.type === 'blob') { result.push(file); } else if (tr.type === 'tree' && truncated) { const files = yield listGitTree(octokit, tr.sha || '', `${file}/`); result.push(...files); } } return result; }); } /***/ }), /***/ 6482: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.formatTime = exports.ellipsis = exports.fixEol = exports.tableEscape = exports.table = exports.link = exports.Icon = exports.Align = void 0; var Align; (function (Align) { Align["Left"] = ":---"; Align["Center"] = ":---:"; Align["Right"] = "---:"; Align["None"] = "---"; })(Align || (exports.Align = Align = {})); exports.Icon = { skip: '⚪', // ':white_circle:' success: '✅', // ':white_check_mark:' fail: '❌' // ':x:' }; function link(title, address) { return `[${title}](${address})`; } exports.link = link; function table(headers, align, ...rows) { const headerRow = `|${headers.map(tableEscape).join('|')}|`; const alignRow = `|${align.join('|')}|`; const contentRows = rows.map(row => `|${row.map(tableEscape).join('|')}|`).join('\n'); return [headerRow, alignRow, contentRows].join('\n'); } exports.table = table; function tableEscape(content) { return content.toString().replace('|', '\\|'); } exports.tableEscape = tableEscape; function fixEol(text) { var _a; return (_a = text === null || text === void 0 ? void 0 : text.replace(/\r/g, '')) !== null && _a !== void 0 ? 
_a : ''; } exports.fixEol = fixEol; function ellipsis(text, maxLength) { if (text.length <= maxLength) { return text; } return text.substr(0, maxLength - 3) + '...'; } exports.ellipsis = ellipsis; function formatTime(ms) { if (ms > 1000) { return `${Math.round(ms / 1000)}s`; } return `${Math.round(ms)}ms`; } exports.formatTime = formatTime; /***/ }), /***/ 5824: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getExceptionSource = exports.DEFAULT_LOCALE = void 0; const path_utils_1 = __nccwpck_require__(4070); exports.DEFAULT_LOCALE = 'en-US'; function getExceptionSource(stackTrace, trackedFiles, getRelativePath) { const lines = stackTrace.split(/\r?\n/); const re = /\((.*):(\d+):\d+\)$/; for (const str of lines) { const match = str.match(re); if (match !== null) { const [_, fileStr, lineStr] = match; const filePath = (0, path_utils_1.normalizeFilePath)(fileStr); if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) { continue; } const path = getRelativePath(filePath); if (!path) { continue; } if (trackedFiles.includes(path)) { const line = parseInt(lineStr); return { path, line }; } } } } exports.getExceptionSource = getExceptionSource; /***/ }), /***/ 7811: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getFirstNonEmptyLine = exports.parseIsoDate = exports.parseNetDuration = void 0; function parseNetDuration(str) { const durationRe = /^(\d\d):(\d\d):(\d\d(?:\.\d+)?)$/; const durationMatch = str.match(durationRe); if (durationMatch === null) { throw new Error(`Invalid format: "${str}" is not NET duration`); } const [_, hourStr, minStr, secStr] = durationMatch; return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000; } exports.parseNetDuration = parseNetDuration; function parseIsoDate(str) { const isoDateRe = /^\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z)$/; if (str === undefined || !isoDateRe.test(str)) { throw new Error(`Invalid format: "${str}" is not ISO date`); } return new Date(str); } exports.parseIsoDate = parseIsoDate; function getFirstNonEmptyLine(stackTrace) { const lines = stackTrace === null || stackTrace === void 0 ? void 0 : stackTrace.split(/\r?\n/g); return lines === null || lines === void 0 ? 
void 0 : lines.find(str => !/^\s*$/.test(str)); } exports.getFirstNonEmptyLine = getFirstNonEmptyLine; /***/ }), /***/ 4070: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getBasePath = exports.normalizeFilePath = exports.normalizeDirPath = void 0; function normalizeDirPath(path, addTrailingSlash) { if (!path) { return path; } path = normalizeFilePath(path); if (addTrailingSlash && !path.endsWith('/')) { path += '/'; } return path; } exports.normalizeDirPath = normalizeDirPath; function normalizeFilePath(path) { if (!path) { return path; } return path.trim().replace(/\\/g, '/'); } exports.normalizeFilePath = normalizeFilePath; function getBasePath(path, trackedFiles) { if (trackedFiles.includes(path)) { return ''; } let max = ''; for (const file of trackedFiles) { if (path.endsWith(file) && file.length > max.length) { max = file; } } if (max === '') { return undefined; } const base = path.substr(0, path.length - max.length); return base; } exports.getBasePath = getBasePath; /***/ }), /***/ 3328: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.slug = void 0; // Returns HTML element id and href link usable as manual anchor links // This is needed because Github in check run summary doesn't automatically // create links out of headings as it normally does for other markdown content function slug(name) { const slugId = name .trim() .replace(/_/g, '') .replace(/[./\\]/g, '-') .replace(/[^\w-]/g, ''); const id = `user-content-${slugId}`; const link = `#${slugId}`; return { id, link }; } exports.slug = slug; /***/ }), /***/ 7351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(2037)); const utils_1 = __nccwpck_require__(5278); /** * Commands * * Command Format: * ::name key=value,key=value::message * * Examples: * ::warning::This is the message * ::set-env name=MY_VAR::some value */ function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); process.stdout.write(cmd.toString() + os.EOL); } exports.issueCommand = issueCommand; function issue(name, message = '') { issueCommand(name, {}, message); } exports.issue = issue; const CMD_STRING = '::'; class Command { constructor(command, properties, message) { if (!command) { command = 'missing.command'; } this.command = command; this.properties = properties; this.message = message; } toString() { let cmdStr = CMD_STRING + this.command; if (this.properties && Object.keys(this.properties).length > 0) { cmdStr += ' '; let first = true; for (const key in this.properties) { if (this.properties.hasOwnProperty(key)) { const val = this.properties[key]; if (val) { if (first) { first = false; } else { cmdStr += ','; } cmdStr += `${key}=${escapeProperty(val)}`; } } } } cmdStr += `${CMD_STRING}${escapeData(this.message)}`; return cmdStr; } } function escapeData(s) { return utils_1.toCommandValue(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A'); } function escapeProperty(s) { return utils_1.toCommandValue(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A') .replace(/:/g, '%3A') .replace(/,/g, '%2C'); } //# sourceMappingURL=command.js.map /***/ }), /***/ 2186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(7351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); const os = __importStar(__nccwpck_require__(2037)); const path = __importStar(__nccwpck_require__(1017)); const oidc_utils_1 = __nccwpck_require__(8041); /** * The code to exit an action */ var ExitCode; (function (ExitCode) { /** * A code indicating that the action was successful */ ExitCode[ExitCode["Success"] = 0] = "Success"; /** * A code indicating that the action was a failure */ ExitCode[ExitCode["Failure"] = 1] = "Failure"; })(ExitCode = exports.ExitCode || (exports.ExitCode = {})); //----------------------------------------------------------------------- // Variables //----------------------------------------------------------------------- /** * Sets env variable for this action and future actions in the job * @param name the name of the variable to set * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function exportVariable(name, val) { const convertedVal = utils_1.toCommandValue(val); process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); } command_1.issueCommand('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** * Registers a secret which will get masked from logs * @param secret value of the secret */ function setSecret(secret) { command_1.issueCommand('add-mask', {}, secret); } exports.setSecret = setSecret; /** * Prepends inputPath to the PATH (for this action and future actions) * @param inputPath */ function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { file_command_1.issueFileCommand('PATH', inputPath); } else { command_1.issueCommand('add-path', {}, inputPath); } process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; } exports.addPath = addPath; /** * Gets the value of an input. * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. * @returns string */ function getInput(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } if (options && options.trimWhitespace === false) { return val; } return val.trim(); } exports.getInput = getInput; /** * Gets the values of an multiline input. Each value is also trimmed. * * @param name name of the input to get * @param options optional. See InputOptions. 
* @returns string[] * */ function getMultilineInput(name, options) { const inputs = getInput(name, options) .split('\n') .filter(x => x !== ''); if (options && options.trimWhitespace === false) { return inputs; } return inputs.map(input => input.trim()); } exports.getMultilineInput = getMultilineInput; /** * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. * Support boolean input list: `true | True | TRUE | false | False | FALSE` . * The return value is also in boolean type. * ref: https://yaml.org/spec/1.2/spec.html#id2804923 * * @param name name of the input to get * @param options optional. See InputOptions. * @returns boolean */ function getBooleanInput(name, options) { const trueValue = ['true', 'True', 'TRUE']; const falseValue = ['false', 'False', 'FALSE']; const val = getInput(name, options); if (trueValue.includes(val)) return true; if (falseValue.includes(val)) return false; throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); } exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * * @param name name of the output to set * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { const filePath = process.env['GITHUB_OUTPUT'] || ''; if (filePath) { return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); } process.stdout.write(os.EOL); command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); } exports.setOutput = setOutput; /** * Enables or disables the echoing of commands into stdout for the rest of the step. * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. * */ function setCommandEcho(enabled) { command_1.issue('echo', enabled ? 'on' : 'off'); } exports.setCommandEcho = setCommandEcho; //----------------------------------------------------------------------- // Results //----------------------------------------------------------------------- /** * Sets the action status to failed. * When the action exits it will be with an exit code of 1 * @param message add error issue message */ function setFailed(message) { process.exitCode = ExitCode.Failure; error(message); } exports.setFailed = setFailed; //----------------------------------------------------------------------- // Logging Commands //----------------------------------------------------------------------- /** * Gets whether Actions Step Debug is on or not */ function isDebug() { return process.env['RUNNER_DEBUG'] === '1'; } exports.isDebug = isDebug; /** * Writes debug message to user log * @param message debug message */ function debug(message) { command_1.issueCommand('debug', {}, message); } exports.debug = debug; /** * Adds an error issue * @param message error issue message. Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. */ function error(message, properties = {}) { command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** * Adds a warning issue * @param message warning issue message. Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. 
*/ function warning(message, properties = {}) { command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; /** * Adds a notice issue * @param message notice issue message. Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. */ function notice(message, properties = {}) { command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); } exports.notice = notice; /** * Writes info to log with console.log. * @param message info message */ function info(message) { process.stdout.write(message + os.EOL); } exports.info = info; /** * Begin an output group. * * Output until the next `groupEnd` will be foldable in this group * * @param name The name of the output group */ function startGroup(name) { command_1.issue('group', name); } exports.startGroup = startGroup; /** * End an output group. */ function endGroup() { command_1.issue('endgroup'); } exports.endGroup = endGroup; /** * Wrap an asynchronous function call in a group. * * Returns the same type as the function itself. * * @param name The name of the group * @param fn The function to wrap in the group */ function group(name, fn) { return __awaiter(this, void 0, void 0, function* () { startGroup(name); let result; try { result = yield fn(); } finally { endGroup(); } return result; }); } exports.group = group; //----------------------------------------------------------------------- // Wrapper action state //----------------------------------------------------------------------- /** * Saves state for current action, the state can only be retrieved by this action's post job execution. * * @param name name of the state to store * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { const filePath = process.env['GITHUB_STATE'] || ''; if (filePath) { return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); } command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); } exports.saveState = saveState; /** * Gets the value of an state set by this action's main execution. 
* * @param name name of the state to get * @returns string */ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; function getIDToken(aud) { return __awaiter(this, void 0, void 0, function* () { return yield oidc_utils_1.OidcClient.getIDToken(aud); }); } exports.getIDToken = getIDToken; /** * Summary exports */ var summary_1 = __nccwpck_require__(1327); Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); /** * @deprecated use core.summary */ var summary_2 = __nccwpck_require__(1327); Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); /** * Path exports */ var path_utils_1 = __nccwpck_require__(2981); Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); //# sourceMappingURL=core.js.map /***/ }), /***/ 717: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // For internal use, subject to change. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(7147)); const os = __importStar(__nccwpck_require__(2037)); const uuid_1 = __nccwpck_require__(5840); const utils_1 = __nccwpck_require__(5278); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } if (!fs.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { encoding: 'utf8' }); } exports.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { const delimiter = `ghadelimiter_${uuid_1.v4()}`; const convertedValue = utils_1.toCommandValue(value); // These should realistically never happen, but just in case someone finds a // way to exploit uuid generation let's not allow keys or values that contain // the delimiter. 
if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); } if (convertedValue.includes(delimiter)) { throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; } exports.prepareKeyValueMessage = prepareKeyValueMessage; //# sourceMappingURL=file-command.js.map /***/ }), /***/ 8041: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; const http_client_1 = __nccwpck_require__(6255); const auth_1 = __nccwpck_require__(5526); const core_1 = __nccwpck_require__(2186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { const requestOptions = { allowRetries: allowRetry, maxRetries: maxRetry }; return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); } static getRequestToken() { const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; if (!token) { throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); } return token; } static getIDTokenUrl() { const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; if (!runtimeUrl) { throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); } return runtimeUrl; } static getCall(id_token_url) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpclient = OidcClient.createHttpClient(); const res = yield httpclient .getJson(id_token_url) .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { throw new Error('Response json body do not have ID Token field'); } return id_token; }); } static getIDToken(audience) { return __awaiter(this, void 0, void 0, function* () { try { // New ID Token is requested from action service let id_token_url = OidcClient.getIDTokenUrl(); if (audience) { const encodedAudience = encodeURIComponent(audience); id_token_url = `${id_token_url}&audience=${encodedAudience}`; } core_1.debug(`ID token url is ${id_token_url}`); const id_token = yield OidcClient.getCall(id_token_url); core_1.setSecret(id_token); return id_token; } catch (error) { throw new Error(`Error message: ${error.message}`); } }); } } exports.OidcClient = OidcClient; //# sourceMappingURL=oidc-utils.js.map /***/ }), /***/ 2981: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; const path = __importStar(__nccwpck_require__(1017)); /** * toPosixPath converts the given path to the posix form. On Windows, \\ will be * replaced with /. * * @param pth. Path to transform. * @return string Posix path. */ function toPosixPath(pth) { return pth.replace(/[\\]/g, '/'); } exports.toPosixPath = toPosixPath; /** * toWin32Path converts the given path to the win32 form. On Linux, / will be * replaced with \\. * * @param pth. Path to transform. * @return string Win32 path. */ function toWin32Path(pth) { return pth.replace(/[/]/g, '\\'); } exports.toWin32Path = toWin32Path; /** * toPlatformPath converts the given path to a platform-specific path. It does * this by replacing instances of / and \ with the platform-specific path * separator. * * @param pth The path to platformize. * @return string The platform-specific path. */ function toPlatformPath(pth) { return pth.replace(/[/\\]/g, path.sep); } exports.toPlatformPath = toPlatformPath; //# sourceMappingURL=path-utils.js.map /***/ }), /***/ 1327: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; const os_1 = __nccwpck_require__(2037); const fs_1 = __nccwpck_require__(7147); const { access, appendFile, writeFile } = fs_1.promises; exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; class Summary { constructor() { this._buffer = ''; } /** * Finds the summary file path from the environment, rejects if env var is not found or file does not exist * Also checks r/w permissions. 
* * @returns step summary file path */ filePath() { return __awaiter(this, void 0, void 0, function* () { if (this._filePath) { return this._filePath; } const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; if (!pathFromEnv) { throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); } try { yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); } catch (_a) { throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); } this._filePath = pathFromEnv; return this._filePath; }); } /** * Wraps content in an HTML tag, adding any HTML attributes * * @param {string} tag HTML tag to wrap * @param {string | null} content content within the tag * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add * * @returns {string} content wrapped in HTML element */ wrap(tag, content, attrs = {}) { const htmlAttrs = Object.entries(attrs) .map(([key, value]) => ` ${key}="${value}"`) .join(''); if (!content) { return `<${tag}${htmlAttrs}>`; } return `<${tag}${htmlAttrs}>${content}`; } /** * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. * * @param {SummaryWriteOptions} [options] (optional) options for write operation * * @returns {Promise} summary instance */ write(options) { return __awaiter(this, void 0, void 0, function* () { const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); const filePath = yield this.filePath(); const writeFunc = overwrite ? writeFile : appendFile; yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); return this.emptyBuffer(); }); } /** * Clears the summary buffer and wipes the summary file * * @returns {Summary} summary instance */ clear() { return __awaiter(this, void 0, void 0, function* () { return this.emptyBuffer().write({ overwrite: true }); }); } /** * Returns the current summary buffer as a string * * @returns {string} string of summary buffer */ stringify() { return this._buffer; } /** * If the summary buffer is empty * * @returns {boolen} true if the buffer is empty */ isEmptyBuffer() { return this._buffer.length === 0; } /** * Resets the summary buffer without writing to summary file * * @returns {Summary} summary instance */ emptyBuffer() { this._buffer = ''; return this; } /** * Adds raw text to the summary buffer * * @param {string} text content to add * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) * * @returns {Summary} summary instance */ addRaw(text, addEOL = false) { this._buffer += text; return addEOL ? 
this.addEOL() : this; } /** * Adds the operating system-specific end-of-line marker to the buffer * * @returns {Summary} summary instance */ addEOL() { return this.addRaw(os_1.EOL); } /** * Adds an HTML codeblock to the summary buffer * * @param {string} code content to render within fenced code block * @param {string} lang (optional) language to syntax highlight code * * @returns {Summary} summary instance */ addCodeBlock(code, lang) { const attrs = Object.assign({}, (lang && { lang })); const element = this.wrap('pre', this.wrap('code', code), attrs); return this.addRaw(element).addEOL(); } /** * Adds an HTML list to the summary buffer * * @param {string[]} items list of items to render * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) * * @returns {Summary} summary instance */ addList(items, ordered = false) { const tag = ordered ? 'ol' : 'ul'; const listItems = items.map(item => this.wrap('li', item)).join(''); const element = this.wrap(tag, listItems); return this.addRaw(element).addEOL(); } /** * Adds an HTML table to the summary buffer * * @param {SummaryTableCell[]} rows table rows * * @returns {Summary} summary instance */ addTable(rows) { const tableBody = rows .map(row => { const cells = row .map(cell => { if (typeof cell === 'string') { return this.wrap('td', cell); } const { header, data, colspan, rowspan } = cell; const tag = header ? 'th' : 'td'; const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); return this.wrap(tag, data, attrs); }) .join(''); return this.wrap('tr', cells); }) .join(''); const element = this.wrap('table', tableBody); return this.addRaw(element).addEOL(); } /** * Adds a collapsable HTML details element to the summary buffer * * @param {string} label text for the closed state * @param {string} content collapsable content * * @returns {Summary} summary instance */ addDetails(label, content) { const element = this.wrap('details', this.wrap('summary', label) + content); return this.addRaw(element).addEOL(); } /** * Adds an HTML image tag to the summary buffer * * @param {string} src path to the image you to embed * @param {string} alt text description of the image * @param {SummaryImageOptions} options (optional) addition image attributes * * @returns {Summary} summary instance */ addImage(src, alt, options) { const { width, height } = options || {}; const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); return this.addRaw(element).addEOL(); } /** * Adds an HTML section heading element * * @param {string} text heading text * @param {number | string} [level=1] (optional) the heading level, default: 1 * * @returns {Summary} summary instance */ addHeading(text, level) { const tag = `h${level}`; const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) ? tag : 'h1'; const element = this.wrap(allowedTag, text); return this.addRaw(element).addEOL(); } /** * Adds an HTML thematic break (
<hr>) to the summary buffer * * @returns {Summary} summary instance */ addSeparator() { const element = this.wrap('hr', null); return this.addRaw(element).addEOL(); } /** * Adds an HTML line break (<br>
) to the summary buffer * * @returns {Summary} summary instance */ addBreak() { const element = this.wrap('br', null); return this.addRaw(element).addEOL(); } /** * Adds an HTML blockquote to the summary buffer * * @param {string} text quote text * @param {string} cite (optional) citation url * * @returns {Summary} summary instance */ addQuote(text, cite) { const attrs = Object.assign({}, (cite && { cite })); const element = this.wrap('blockquote', text, attrs); return this.addRaw(element).addEOL(); } /** * Adds an HTML anchor tag to the summary buffer * * @param {string} text link text/content * @param {string} href hyperlink * * @returns {Summary} summary instance */ addLink(text, href) { const element = this.wrap('a', text, { href }); return this.addRaw(element).addEOL(); } } const _summary = new Summary(); /** * @deprecated use `core.summary` */ exports.markdownSummary = _summary; exports.summary = _summary; //# sourceMappingURL=summary.js.map /***/ }), /***/ 5278: /***/ ((__unused_webpack_module, exports) => { "use strict"; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toCommandProperties = exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string */ function toCommandValue(input) { if (input === null || input === undefined) { return ''; } else if (typeof input === 'string' || input instanceof String) { return input; } return JSON.stringify(input); } exports.toCommandValue = toCommandValue; /** * * @param annotationProperties * @returns The command properties to send with the actual annotation command * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 */ function toCommandProperties(annotationProperties) { if (!Object.keys(annotationProperties).length) { return {}; } return { title: annotationProperties.title, file: annotationProperties.file, line: annotationProperties.startLine, endLine: annotationProperties.endLine, col: annotationProperties.startColumn, endColumn: annotationProperties.endColumn }; } exports.toCommandProperties = toCommandProperties; //# sourceMappingURL=utils.js.map /***/ }), /***/ 1514: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getExecOutput = exports.exec = void 0; const string_decoder_1 = __nccwpck_require__(1576); const tr = __importStar(__nccwpck_require__(8159)); /** * Exec a command. * Output will be streamed to the live console. * Returns promise with return code * * @param commandLine command to execute (can include additional args). Must be correctly escaped. * @param args optional arguments for tool. Escaping is handled by the lib. * @param options optional exec options. See ExecOptions * @returns Promise exit code */ function exec(commandLine, args, options) { return __awaiter(this, void 0, void 0, function* () { const commandArgs = tr.argStringToArray(commandLine); if (commandArgs.length === 0) { throw new Error(`Parameter 'commandLine' cannot be null or empty.`); } // Path to tool to execute should be first arg const toolPath = commandArgs[0]; args = commandArgs.slice(1).concat(args || []); const runner = new tr.ToolRunner(toolPath, args, options); return runner.exec(); }); } exports.exec = exec; /** * Exec a command and get the output. * Output will be streamed to the live console. * Returns promise with the exit code and collected stdout and stderr * * @param commandLine command to execute (can include additional args). Must be correctly escaped. * @param args optional arguments for tool. Escaping is handled by the lib. * @param options optional exec options. See ExecOptions * @returns Promise exit code, stdout, and stderr */ function getExecOutput(commandLine, args, options) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { let stdout = ''; let stderr = ''; //Using string decoder covers the case where a mult-byte character is split const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; const stdErrListener = (data) => { stderr += stderrDecoder.write(data); if (originalStdErrListener) { originalStdErrListener(data); } }; const stdOutListener = (data) => { stdout += stdoutDecoder.write(data); if (originalStdoutListener) { originalStdoutListener(data); } }; const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); //flush any remaining characters stdout += stdoutDecoder.end(); stderr += stderrDecoder.end(); return { exitCode, stdout, stderr }; }); } exports.getExecOutput = getExecOutput; //# sourceMappingURL=exec.js.map /***/ }), /***/ 8159: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.argStringToArray = exports.ToolRunner = void 0; const os = __importStar(__nccwpck_require__(2037)); const events = __importStar(__nccwpck_require__(2361)); const child = __importStar(__nccwpck_require__(2081)); const path = __importStar(__nccwpck_require__(1017)); const io = __importStar(__nccwpck_require__(7436)); const ioUtil = __importStar(__nccwpck_require__(1962)); const timers_1 = __nccwpck_require__(9512); /* eslint-disable @typescript-eslint/unbound-method */ const IS_WINDOWS = process.platform === 'win32'; /* * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. */ class ToolRunner extends events.EventEmitter { constructor(toolPath, args, options) { super(); if (!toolPath) { throw new Error("Parameter 'toolPath' cannot be null or empty."); } this.toolPath = toolPath; this.args = args || []; this.options = options || {}; } _debug(message) { if (this.options.listeners && this.options.listeners.debug) { this.options.listeners.debug(message); } } _getCommandString(options, noPrefix) { const toolPath = this._getSpawnFileName(); const args = this._getSpawnArgs(options); let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool if (IS_WINDOWS) { // Windows + cmd file if (this._isCmdFile()) { cmd += toolPath; for (const a of args) { cmd += ` ${a}`; } } // Windows + verbatim else if (options.windowsVerbatimArguments) { cmd += `"${toolPath}"`; for (const a of args) { cmd += ` ${a}`; } } // Windows (regular) else { cmd += this._windowsQuoteCmdArg(toolPath); for (const a of args) { cmd += ` ${this._windowsQuoteCmdArg(a)}`; } } } else { // OSX/Linux - this can likely be improved with some form of quoting. // creating processes on Unix is fundamentally different than Windows. // on Unix, execvp() takes an arg array. cmd += toolPath; for (const a of args) { cmd += ` ${a}`; } } return cmd; } _processLineBuffer(data, strBuffer, onLine) { try { let s = strBuffer + data.toString(); let n = s.indexOf(os.EOL); while (n > -1) { const line = s.substring(0, n); onLine(line); // the rest of the string ... s = s.substring(n + os.EOL.length); n = s.indexOf(os.EOL); } return s; } catch (err) { // streaming lines to console is best effort. Don't fail a build. this._debug(`error processing line. Failed with error ${err}`); return ''; } } _getSpawnFileName() { if (IS_WINDOWS) { if (this._isCmdFile()) { return process.env['COMSPEC'] || 'cmd.exe'; } } return this.toolPath; } _getSpawnArgs(options) { if (IS_WINDOWS) { if (this._isCmdFile()) { let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; for (const a of this.args) { argline += ' '; argline += options.windowsVerbatimArguments ? a : this._windowsQuoteCmdArg(a); } argline += '"'; return [argline]; } } return this.args; } _endsWith(str, end) { return str.endsWith(end); } _isCmdFile() { const upperToolPath = this.toolPath.toUpperCase(); return (this._endsWith(upperToolPath, '.CMD') || this._endsWith(upperToolPath, '.BAT')); } _windowsQuoteCmdArg(arg) { // for .exe, apply the normal quoting rules that libuv applies if (!this._isCmdFile()) { return this._uvQuoteCmdArg(arg); } // otherwise apply quoting rules specific to the cmd.exe command line parser. // the libuv rules are generic and are not designed specifically for cmd.exe // command line parser. // // for a detailed description of the cmd.exe command line parser, refer to // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 // need quotes for empty arg if (!arg) { return '""'; } // determine whether the arg needs to be quoted const cmdSpecialChars = [ ' ', '\t', '&', '(', ')', '[', ']', '{', '}', '^', '=', ';', '!', "'", '+', ',', '`', '~', '|', '<', '>', '"' ]; let needsQuotes = false; for (const char of arg) { if (cmdSpecialChars.some(x => x === char)) { needsQuotes = true; break; } } // short-circuit if quotes not needed if (!needsQuotes) { return arg; } // the following quoting rules are very similar to the rules that by libuv applies. // // 1) wrap the string in quotes // // 2) double-up quotes - i.e. " => "" // // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately // doesn't work well with a cmd.exe command line. // // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. // for example, the command line: // foo.exe "myarg:""my val""" // is parsed by a .NET console app into an arg array: // [ "myarg:\"my val\"" ] // which is the same end result when applying libuv quoting rules. 
although the actual // command line from libuv quoting rules would look like: // foo.exe "myarg:\"my val\"" // // 3) double-up slashes that precede a quote, // e.g. hello \world => "hello \world" // hello\"world => "hello\\""world" // hello\\"world => "hello\\\\""world" // hello world\ => "hello world\\" // // technically this is not required for a cmd.exe command line, or the batch argument parser. // the reasons for including this as a .cmd quoting rule are: // // a) this is optimized for the scenario where the argument is passed from the .cmd file to an // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. // // b) it's what we've been doing previously (by deferring to node default behavior) and we // haven't heard any complaints about that aspect. // // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be // escaped when used on the command line directly - even though within a .cmd file % can be escaped // by using %%. // // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. // // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args // to an external program. // // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. // % can be escaped within a .cmd file. let reverse = '"'; let quoteHit = true; for (let i = arg.length; i > 0; i--) { // walk the string in reverse reverse += arg[i - 1]; if (quoteHit && arg[i - 1] === '\\') { reverse += '\\'; // double the slash } else if (arg[i - 1] === '"') { quoteHit = true; reverse += '"'; // double the quote } else { quoteHit = false; } } reverse += '"'; return reverse .split('') .reverse() .join(''); } _uvQuoteCmdArg(arg) { // Tool runner wraps child_process.spawn() and needs to apply the same quoting as // Node in certain cases where the undocumented spawn option windowsVerbatimArguments // is used. // // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), // pasting copyright notice from Node within this function: // // Copyright Joyent, Inc. and other Node contributors. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. if (!arg) { // Need double quotation for empty argument return '""'; } if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { // No quotation needed return arg; } if (!arg.includes('"') && !arg.includes('\\')) { // No embedded double quotes or backslashes, so I can just wrap // quote marks around the whole thing. return `"${arg}"`; } // Expected input/output: // input : hello"world // output: "hello\"world" // input : hello""world // output: "hello\"\"world" // input : hello\world // output: hello\world // input : hello\\world // output: hello\\world // input : hello\"world // output: "hello\\\"world" // input : hello\\"world // output: "hello\\\\\"world" // input : hello world\ // output: "hello world\\" - note the comment in libuv actually reads "hello world\" // but it appears the comment is wrong, it should be "hello world\\" let reverse = '"'; let quoteHit = true; for (let i = arg.length; i > 0; i--) { // walk the string in reverse reverse += arg[i - 1]; if (quoteHit && arg[i - 1] === '\\') { reverse += '\\'; } else if (arg[i - 1] === '"') { quoteHit = true; reverse += '\\'; } else { quoteHit = false; } } reverse += '"'; return reverse .split('') .reverse() .join(''); } _cloneExecOptions(options) { options = options || {}; const result = { cwd: options.cwd || process.cwd(), env: options.env || process.env, silent: options.silent || false, windowsVerbatimArguments: options.windowsVerbatimArguments || false, failOnStdErr: options.failOnStdErr || false, ignoreReturnCode: options.ignoreReturnCode || false, delay: options.delay || 10000 }; result.outStream = options.outStream || process.stdout; result.errStream = options.errStream || process.stderr; return result; } _getSpawnOptions(options, toolPath) { options = options || {}; const result = {}; result.cwd = options.cwd; result.env = options.env; result['windowsVerbatimArguments'] = options.windowsVerbatimArguments || this._isCmdFile(); if (options.windowsVerbatimArguments) { result.argv0 = `"${toolPath}"`; } return result; } /** * Exec a tool. * Output will be streamed to the live console. * Returns promise with return code * * @param tool path to tool to exec * @param options optional exec options. 
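 * A hedged usage sketch (assumes direct use of this ToolRunner class; the tool and arguments are placeholders):
 * @example
 *   const runner = new ToolRunner('node', ['--version'], { silent: true, ignoreReturnCode: true });
 *   const exitCode = await runner.exec();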
See ExecOptions * @returns number */ exec() { return __awaiter(this, void 0, void 0, function* () { // root the tool path if it is unrooted and contains relative pathing if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes('/') || (IS_WINDOWS && this.toolPath.includes('\\')))) { // prefer options.cwd if it is specified, however options.cwd may also need to be rooted this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } // if the tool is only a file name, then resolve it from the PATH // otherwise verify it exists (add extension on Windows if necessary) this.toolPath = yield io.which(this.toolPath, true); return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug('arguments:'); for (const arg of this.args) { this._debug(` ${arg}`); } const optionsNonNull = this._cloneExecOptions(this.options); if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); } const state = new ExecState(optionsNonNull, this.toolPath); state.on('debug', (message) => { this._debug(message); }); if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); } const fileName = this._getSpawnFileName(); const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); let stdbuffer = ''; if (cp.stdout) { cp.stdout.on('data', (data) => { if (this.options.listeners && this.options.listeners.stdout) { this.options.listeners.stdout(data); } if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(data); } stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { if (this.options.listeners && this.options.listeners.stdline) { this.options.listeners.stdline(line); } }); }); } let errbuffer = ''; if (cp.stderr) { cp.stderr.on('data', (data) => { state.processStderr = true; if (this.options.listeners && this.options.listeners.stderr) { this.options.listeners.stderr(data); } if (!optionsNonNull.silent && optionsNonNull.errStream && optionsNonNull.outStream) { const s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; s.write(data); } errbuffer = this._processLineBuffer(data, errbuffer, (line) => { if (this.options.listeners && this.options.listeners.errline) { this.options.listeners.errline(line); } }); }); } cp.on('error', (err) => { state.processError = err.message; state.processExited = true; state.processClosed = true; state.CheckComplete(); }); cp.on('exit', (code) => { state.processExitCode = code; state.processExited = true; this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); state.CheckComplete(); }); cp.on('close', (code) => { state.processExitCode = code; state.processExited = true; state.processClosed = true; this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); state.CheckComplete(); }); state.on('done', (error, exitCode) => { if (stdbuffer.length > 0) { this.emit('stdline', stdbuffer); } if (errbuffer.length > 0) { this.emit('errline', errbuffer); } cp.removeAllListeners(); if (error) { reject(error); } else { resolve(exitCode); } }); if (this.options.input) { if (!cp.stdin) { throw new Error('child process missing stdin'); } cp.stdin.end(this.options.input); } })); }); } } exports.ToolRunner = ToolRunner; /** * Convert an arg string to an array of args. 
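 * A worked sketch of the parsing rules implemented below (inputs are illustrative):
 * @example
 *   argStringToArray('say "hello world" --flag')      // => ['say', 'hello world', '--flag']
 *   argStringToArray('print "a \\"quoted\\" value"')  // => ['print', 'a "quoted" value']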
Handles escaping * * @param argString string of arguments * @returns string[] array of arguments */ function argStringToArray(argString) { const args = []; let inQuotes = false; let escaped = false; let arg = ''; function append(c) { // we only escape double quotes. if (escaped && c !== '"') { arg += '\\'; } arg += c; escaped = false; } for (let i = 0; i < argString.length; i++) { const c = argString.charAt(i); if (c === '"') { if (!escaped) { inQuotes = !inQuotes; } else { append(c); } continue; } if (c === '\\' && escaped) { append(c); continue; } if (c === '\\' && inQuotes) { escaped = true; continue; } if (c === ' ' && !inQuotes) { if (arg.length > 0) { args.push(arg); arg = ''; } continue; } append(c); } if (arg.length > 0) { args.push(arg.trim()); } return args; } exports.argStringToArray = argStringToArray; class ExecState extends events.EventEmitter { constructor(options, toolPath) { super(); this.processClosed = false; // tracks whether the process has exited and stdio is closed this.processError = ''; this.processExitCode = 0; this.processExited = false; // tracks whether the process has exited this.processStderr = false; // tracks whether stderr was written to this.delay = 10000; // 10 seconds this.done = false; this.timeout = null; if (!toolPath) { throw new Error('toolPath must not be empty'); } this.options = options; this.toolPath = toolPath; if (options.delay) { this.delay = options.delay; } } CheckComplete() { if (this.done) { return; } if (this.processClosed) { this._setResult(); } else if (this.processExited) { this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); } } _debug(message) { this.emit('debug', message); } _setResult() { // determine whether there is an error let error; if (this.processExited) { if (this.processError) { error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); } else if (this.processStderr && this.options.failOnStdErr) { error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); } } // clear the timeout if (this.timeout) { clearTimeout(this.timeout); this.timeout = null; } this.done = true; this.emit('done', error, this.processExitCode); } static HandleTimeout(state) { if (state.done) { return; } if (!state.processClosed && state.processExited) { const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; state._debug(message); } state._setResult(); } } //# sourceMappingURL=toolrunner.js.map /***/ }), /***/ 4087: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Context = void 0; const fs_1 = __nccwpck_require__(7147); const os_1 = __nccwpck_require__(2037); class Context { /** * Hydrate the context from the environment */ constructor() { var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; this.sha = process.env.GITHUB_SHA; this.ref = process.env.GITHUB_REF; this.workflow = process.env.GITHUB_WORKFLOW; this.action = process.env.GITHUB_ACTION; this.actor = process.env.GITHUB_ACTOR; this.job = process.env.GITHUB_JOB; this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } get repo() { if (process.env.GITHUB_REPOSITORY) { const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); return { owner, repo }; } if (this.payload.repository) { return { owner: this.payload.repository.owner.login, repo: this.payload.repository.name }; } throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } } exports.Context = Context; //# sourceMappingURL=context.js.map /***/ }), /***/ 5438: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
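// Illustrative usage sketch of the Context class above (as exposed through the `context`
// export created below; assumes the standard Actions environment variables are set):
//
//   const { owner, repo } = context.repo;   // parsed from GITHUB_REPOSITORY ("owner/repo")
//   const sha = context.sha;                // from GITHUB_SHA
//   const runId = context.runId;            // from GITHUB_RUN_ID, parsed as an integer
//
// context.repo throws when GITHUB_REPOSITORY is unset and the event payload has no repository.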
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokit = exports.context = void 0; const Context = __importStar(__nccwpck_require__(4087)); const utils_1 = __nccwpck_require__(3030); exports.context = new Context.Context(); /** * Returns a hydrated octokit ready to use for GitHub Actions * * @param token the repo PAT or GITHUB_TOKEN * @param options other options to set */ function getOctokit(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map /***/ }), /***/ 7914: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(6255)); const undici_1 = __nccwpck_require__(1773); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); } else if (token && options.auth) { throw new Error('Parameters token and opts.auth may not both be specified'); } return typeof options.auth === 'string' ? 
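// Illustrative usage sketch of getOctokit from the module above (token, owner and repo are
// placeholders; the `rest` namespace comes from the rest-endpoint-methods plugin):
//
//   const octokit = getOctokit(process.env.GITHUB_TOKEN);
//   const { data: pulls } = await octokit.rest.pulls.list({ owner, repo, state: 'open' });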
options.auth : `token ${token}`; } exports.getAuthString = getAuthString; function getProxyAgent(destinationUrl) { const hc = new httpClient.HttpClient(); return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; function getProxyAgentDispatcher(destinationUrl) { const hc = new httpClient.HttpClient(); return hc.getAgentDispatcher(destinationUrl); } exports.getProxyAgentDispatcher = getProxyAgentDispatcher; function getProxyFetch(destinationUrl) { const httpDispatcher = getProxyAgentDispatcher(destinationUrl); const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); }); return proxyFetch; } exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } exports.getApiBaseUrl = getApiBaseUrl; //# sourceMappingURL=utils.js.map /***/ }), /***/ 3030: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; const Context = __importStar(__nccwpck_require__(4087)); const Utils = __importStar(__nccwpck_require__(7914)); // octokit + plugins const core_1 = __nccwpck_require__(6762); const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); const plugin_paginate_rest_1 = __nccwpck_require__(4193); exports.context = new Context.Context(); const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { agent: Utils.getProxyAgent(baseUrl), fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); /** * Convience function to correctly format Octokit Options to pass into the constructor. 
* * @param token the repo PAT or GITHUB_TOKEN * @param options other options to set */ function getOctokitOptions(token, options) { const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller // Auth const auth = Utils.getAuthString(token, opts); if (auth) { opts.auth = auth; } return opts; } exports.getOctokitOptions = getOctokitOptions; //# sourceMappingURL=utils.js.map /***/ }), /***/ 5526: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; class BasicCredentialHandler { constructor(username, password) { this.username = username; this.password = password; } prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.BasicCredentialHandler = BasicCredentialHandler; class BearerCredentialHandler { constructor(token) { this.token = token; } // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Bearer ${this.token}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.BearerCredentialHandler = BearerCredentialHandler; class PersonalAccessTokenCredentialHandler { constructor(token) { this.token = token; } // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; //# sourceMappingURL=auth.js.map /***/ }), /***/ 6255: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? 
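// Illustrative usage sketch: the credential handlers defined in the module above plug into
// the HttpClient defined below through its `handlers` constructor argument (the token and
// URL are placeholders):
//
//   const client = new HttpClient('my-user-agent', [new BearerCredentialHandler(token)]);
//   const res = await client.getJson('https://api.example.com/items');
//
// Each handler's prepareRequest() sets the Authorization header before the request is sent.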
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; const http = __importStar(__nccwpck_require__(3685)); const https = __importStar(__nccwpck_require__(5687)); const pm = __importStar(__nccwpck_require__(9835)); const tunnel = __importStar(__nccwpck_require__(4294)); const undici_1 = __nccwpck_require__(1773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = 
"NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; })(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; })(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; })(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; const HttpRedirectCodes = [ HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect ]; const HttpResponseRetryCodes = [ HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout ]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; class HttpClientError extends Error { constructor(message, statusCode) { super(message); this.name = 'HttpClientError'; this.statusCode = statusCode; Object.setPrototypeOf(this, HttpClientError.prototype); } } exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; } readBody() { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { let output = Buffer.alloc(0); this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); this.message.on('end', () => { resolve(output.toString()); }); })); }); } readBodyBuffer() { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { const chunks = []; this.message.on('data', (chunk) => { chunks.push(chunk); }); this.message.on('end', () => { resolve(Buffer.concat(chunks)); }); })); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { const parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; class HttpClient { constructor(userAgent, handlers, requestOptions) { this._ignoreSslError = false; this._allowRedirects = true; this._allowRedirectDowngrade = false; this._maxRedirects = 50; this._allowRetries = false; this._maxRetries = 1; this._keepAlive = false; this._disposed = false; this.userAgent = userAgent; this.handlers = handlers || []; this.requestOptions = requestOptions; if (requestOptions) { if (requestOptions.ignoreSslError != null) { this._ignoreSslError = requestOptions.ignoreSslError; } this._socketTimeout = requestOptions.socketTimeout; if (requestOptions.allowRedirects != null) { this._allowRedirects = requestOptions.allowRedirects; } if (requestOptions.allowRedirectDowngrade != null) { this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; } if (requestOptions.maxRedirects != null) { this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); } if (requestOptions.keepAlive != null) { this._keepAlive = requestOptions.keepAlive; } if (requestOptions.allowRetries != null) { 
this._allowRetries = requestOptions.allowRetries; } if (requestOptions.maxRetries != null) { this._maxRetries = requestOptions.maxRetries; } } } options(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); }); } get(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('GET', requestUrl, null, additionalHeaders || {}); }); } del(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('DELETE', requestUrl, null, additionalHeaders || {}); }); } post(requestUrl, data, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('POST', requestUrl, data, additionalHeaders || {}); }); } patch(requestUrl, data, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('PATCH', requestUrl, data, additionalHeaders || {}); }); } put(requestUrl, data, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('PUT', requestUrl, data, additionalHeaders || {}); }); } head(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('HEAD', requestUrl, null, additionalHeaders || {}); }); } sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** * Gets a typed object from an endpoint * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise */ getJson(requestUrl, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); const res = yield this.get(requestUrl, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } postJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); const res = yield this.post(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } putJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); const res = yield this.put(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } patchJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, 
MediaTypes.ApplicationJson); const res = yield this.patch(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ request(verb, requestUrl, data, headers) { return __awaiter(this, void 0, void 0, function* () { if (this._disposed) { throw new Error('Client has already been disposed.'); } const parsedUrl = new URL(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) ? this._maxRetries + 1 : 1; let numTries = 0; let response; do { response = yield this.requestRaw(info, data); // Check if it's an authentication challenge if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (const handler of this.handlers) { if (handler.canHandleAuthentication(response)) { authenticationHandler = handler; break; } } if (authenticationHandler) { return authenticationHandler.handleAuthentication(this, info, data); } else { // We have received an unauthorized response but have no handlers to handle it. // Let the response return to the caller. return response; } } let redirectsRemaining = this._maxRedirects; while (response.message.statusCode && HttpRedirectCodes.includes(response.message.statusCode) && this._allowRedirects && redirectsRemaining > 0) { const redirectUrl = response.message.headers['location']; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } const parsedRedirectUrl = new URL(redirectUrl); if (parsedUrl.protocol === 'https:' && parsedUrl.protocol !== parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); } // we need to finish reading the response before reassigning response // which will leak the open socket. yield response.readBody(); // strip authorization header if redirected to a different hostname if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { for (const header in headers) { // header names are case insensitive if (header.toLowerCase() === 'authorization') { delete headers[header]; } } } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = yield this.requestRaw(info, data); redirectsRemaining--; } if (!response.message.statusCode || !HttpResponseRetryCodes.includes(response.message.statusCode)) { // If not a retry code, return immediately instead of retrying return response; } numTries += 1; if (numTries < maxTries) { yield response.readBody(); yield this._performExponentialBackoff(numTries); } } while (numTries < maxTries); return response; }); } /** * Needs to be called if keepAlive is set to true in request options. */ dispose() { if (this._agent) { this._agent.destroy(); } this._disposed = true; } /** * Raw request. * @param info * @param data */ requestRaw(info, data) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => { function callbackForResult(err, res) { if (err) { reject(err); } else if (!res) { // If `err` is not passed, then `res` must be passed. 
reject(new Error('Unknown error')); } else { resolve(res); } } this.requestRawWithCallback(info, data, callbackForResult); }); }); } /** * Raw request with callback. * @param info * @param data * @param onResult */ requestRawWithCallback(info, data, onResult) { if (typeof data === 'string') { if (!info.options.headers) { info.options.headers = {}; } info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; function handleResult(err, res) { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } } const req = info.httpModule.request(info.options, (msg) => { const res = new HttpClientResponse(msg); handleResult(undefined, res); }); let socket; req.on('socket', sock => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually req.setTimeout(this._socketTimeout || 3 * 60000, () => { if (socket) { socket.end(); } handleResult(new Error(`Request timeout: ${info.options.path}`)); }); req.on('error', function (err) { // err has statusCode property // res should have headers handleResult(err); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); } if (data && typeof data !== 'string') { data.on('close', function () { req.end(); }); data.pipe(req); } else { req.end(); } } /** * Gets an http agent. This function is useful when you need an http agent that handles * routing through a proxy server - depending upon the url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } getAgentDispatcher(serverUrl) { const parsedUrl = new URL(serverUrl); const proxyUrl = pm.getProxyUrl(parsedUrl); const useProxy = proxyUrl && proxyUrl.hostname; if (!useProxy) { return; } return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; const usingSsl = info.parsedUrl.protocol === 'https:'; info.httpModule = usingSsl ? https : http; const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; info.options.port = info.parsedUrl.port ? 
parseInt(info.parsedUrl.port) : defaultPort; info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { info.options.headers['user-agent'] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { for (const handler of this.handlers) { handler.prepareRequest(info.options); } } return info; } _mergeHeaders(headers) { if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; } return additionalHeaders[header] || clientHeader || _default; } _getAgent(parsedUrl) { let agent; const proxyUrl = pm.getProxyUrl(parsedUrl); const useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } if (this._keepAlive && !useProxy) { agent = this._agent; } // if agent is already assigned use that agent. if (agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; if (this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. if (proxyUrl && proxyUrl.hostname) { const agentOptions = { maxSockets, keepAlive: this._keepAlive, proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` })), { host: proxyUrl.hostname, port: proxyUrl.port }) }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; if (usingSsl) { tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; } else { tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; } agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } // if reusing agent across request and tunneling agent isn't assigned create a new agent if (this._keepAlive && !agent) { const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } // if not using private agent and tunnel agent isn't setup then use global agent if (!agent) { agent = usingSsl ? https.globalAgent : http.globalAgent; } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); } return agent; } _getProxyAgentDispatcher(parsedUrl, proxyUrl) { let proxyAgent; if (this._keepAlive) { proxyAgent = this._proxyAgentDispatcher; } // if agent is already assigned use that agent. if (proxyAgent) { return proxyAgent; } const usingSsl = parsedUrl.protocol === 'https:'; proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { token: `${proxyUrl.username}:${proxyUrl.password}` }))); this._proxyAgentDispatcher = proxyAgent; if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { rejectUnauthorized: false }); } return proxyAgent; } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); return new Promise(resolve => setTimeout(() => resolve(), ms)); }); } _processResponse(res, options) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { const statusCode = res.message.statusCode || 0; const response = { statusCode, result: null, headers: {} }; // not found leads to null obj returned if (statusCode === HttpCodes.NotFound) { resolve(response); } // get the result from the body function dateTimeDeserializer(key, value) { if (typeof value === 'string') { const a = new Date(value); if (!isNaN(a.valueOf())) { return a; } } return value; } let obj; let contents; try { contents = yield res.readBody(); if (contents && contents.length > 0) { if (options && options.deserializeDates) { obj = JSON.parse(contents, dateTimeDeserializer); } else { obj = JSON.parse(contents); } response.result = obj; } response.headers = res.message.headers; } catch (err) { // Invalid resource (contents not json); leaving result obj null } // note that 3xx redirects are handled by the http layer. 
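// Worked example for _performExponentialBackoff above: with ExponentialBackoffTimeSlice = 5
// and ExponentialBackoffCeiling = 10, the wait is 5 * 2^retryNumber milliseconds:
//   retry 1 -> 10 ms, retry 2 -> 20 ms, retry 3 -> 40 ms, ...
//   capped at retry 10 -> 5 * 1024 = 5120 ms for all later attempts.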
if (statusCode > 299) { let msg; // if exception/error in body, attempt to get better error if (obj && obj.message) { msg = obj.message; } else if (contents && contents.length > 0) { // it may be the case that the exception is in the body message as string msg = contents; } else { msg = `Failed request: (${statusCode})`; } const err = new HttpClientError(msg, statusCode); err.result = response.result; reject(err); } else { resolve(response); } })); }); } } exports.HttpClient = HttpClient; const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); //# sourceMappingURL=index.js.map /***/ }), /***/ 9835: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.checkBypass = exports.getProxyUrl = void 0; function getProxyUrl(reqUrl) { const usingSsl = reqUrl.protocol === 'https:'; if (checkBypass(reqUrl)) { return undefined; } const proxyVar = (() => { if (usingSsl) { return process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { return process.env['http_proxy'] || process.env['HTTP_PROXY']; } })(); if (proxyVar) { try { return new URL(proxyVar); } catch (_a) { if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) return new URL(`http://${proxyVar}`); } } else { return undefined; } } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } const reqHost = reqUrl.hostname; if (isLoopbackAddress(reqHost)) { return true; } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } // Determine the request port let reqPort; if (reqUrl.port) { reqPort = Number(reqUrl.port); } else if (reqUrl.protocol === 'http:') { reqPort = 80; } else if (reqUrl.protocol === 'https:') { reqPort = 443; } // Format the request hostname and hostname with port const upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy for (const upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { if (upperNoProxyItem === '*' || upperReqHosts.some(x => x === upperNoProxyItem || x.endsWith(`.${upperNoProxyItem}`) || (upperNoProxyItem.startsWith('.') && x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; function isLoopbackAddress(host) { const hostLower = host.toLowerCase(); return (hostLower === 'localhost' || hostLower.startsWith('127.') || hostLower.startsWith('[::1]') || hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } //# sourceMappingURL=proxy.js.map /***/ }), /***/ 1962: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
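// Illustrative NO_PROXY behavior of the proxy helpers above (environment values are examples):
//   with HTTPS_PROXY=http://proxy.local:8080 and NO_PROXY=example.com,localhost
//   - https://api.example.com/graphql -> bypasses the proxy (suffix match on .EXAMPLE.COM)
//   - https://api.github.com          -> routed through http://proxy.local:8080
//   - http://localhost:3000           -> bypasses (loopback addresses always bypass)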
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); const assert_1 = __nccwpck_require__(9491); const fs = __nccwpck_require__(7147); const path = __nccwpck_require__(1017); _a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; exports.IS_WINDOWS = process.platform === 'win32'; function exists(fsPath) { return __awaiter(this, void 0, void 0, function* () { try { yield exports.stat(fsPath); } catch (err) { if (err.code === 'ENOENT') { return false; } throw err; } return true; }); } exports.exists = exists; function isDirectory(fsPath, useStat = false) { return __awaiter(this, void 0, void 0, function* () { const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); return stats.isDirectory(); }); } exports.isDirectory = isDirectory; /** * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). */ function isRooted(p) { p = normalizeSeparators(p); if (!p) { throw new Error('isRooted() parameter "p" cannot be empty'); } if (exports.IS_WINDOWS) { return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello ); // e.g. C: or C:\hello } return p.startsWith('/'); } exports.isRooted = isRooted; /** * Recursively create a directory at `fsPath`. * * This implementation is optimistic, meaning it attempts to create the full * path first, and backs up the path stack from there. * * @param fsPath The path to create * @param maxDepth The maximum recursion depth * @param depth The current recursion depth */ function mkdirP(fsPath, maxDepth = 1000, depth = 1) { return __awaiter(this, void 0, void 0, function* () { assert_1.ok(fsPath, 'a path argument must be provided'); fsPath = path.resolve(fsPath); if (depth >= maxDepth) return exports.mkdir(fsPath); try { yield exports.mkdir(fsPath); return; } catch (err) { switch (err.code) { case 'ENOENT': { yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); yield exports.mkdir(fsPath); return; } default: { let stats; try { stats = yield exports.stat(fsPath); } catch (err2) { throw err; } if (!stats.isDirectory()) throw err; } } } }); } exports.mkdirP = mkdirP; /** * Best effort attempt to determine whether a file exists and is executable. * @param filePath file path to check * @param extensions additional file extensions to try * @return if file exists and is executable, returns the file path. otherwise empty string. 
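 * A behavior sketch (illustrative, assuming a Linux runner where these paths exist as described):
 *   tryGetExecutablePath('/usr/bin/git', [])     -> '/usr/bin/git' when its execute bit is set
 *   tryGetExecutablePath('/usr/bin/missing', []) -> '' (stat fails with ENOENT)
 * On Windows the check compares the file extension against the supplied `extensions` list
 * (e.g. ['.EXE', '.CMD']) instead of Unix permission bits.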
*/ function tryGetExecutablePath(filePath, extensions) { return __awaiter(this, void 0, void 0, function* () { let stats = undefined; try { // test file exists stats = yield exports.stat(filePath); } catch (err) { if (err.code !== 'ENOENT') { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); } } if (stats && stats.isFile()) { if (exports.IS_WINDOWS) { // on Windows, test for valid extension const upperExt = path.extname(filePath).toUpperCase(); if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { return filePath; } } else { if (isUnixExecutable(stats)) { return filePath; } } } // try each extension const originalFilePath = filePath; for (const extension of extensions) { filePath = originalFilePath + extension; stats = undefined; try { stats = yield exports.stat(filePath); } catch (err) { if (err.code !== 'ENOENT') { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); } } if (stats && stats.isFile()) { if (exports.IS_WINDOWS) { // preserve the case of the actual file (since an extension was appended) try { const directory = path.dirname(filePath); const upperName = path.basename(filePath).toUpperCase(); for (const actualName of yield exports.readdir(directory)) { if (upperName === actualName.toUpperCase()) { filePath = path.join(directory, actualName); break; } } } catch (err) { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); } return filePath; } else { if (isUnixExecutable(stats)) { return filePath; } } } } return ''; }); } exports.tryGetExecutablePath = tryGetExecutablePath; function normalizeSeparators(p) { p = p || ''; if (exports.IS_WINDOWS) { // convert slashes on Windows p = p.replace(/\//g, '\\'); // remove redundant slashes return p.replace(/\\\\+/g, '\\'); } // remove redundant slashes return p.replace(/\/\/+/g, '/'); } // on Mac/Linux, test the execute bit // R W X R W X R W X // 256 128 64 32 16 8 4 2 1 function isUnixExecutable(stats) { return ((stats.mode & 1) > 0 || ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || ((stats.mode & 64) > 0 && stats.uid === process.getuid())); } //# sourceMappingURL=io-util.js.map /***/ }), /***/ 7436: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); const childProcess = __nccwpck_require__(2081); const path = __nccwpck_require__(1017); const util_1 = __nccwpck_require__(3837); const ioUtil = __nccwpck_require__(1962); const exec = util_1.promisify(childProcess.exec); /** * Copies a file or folder. 
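 * Usage sketch (illustrative, via the published @actions/io package; paths are placeholders):
 *   const io = require('@actions/io');
 *   await io.cp('./reports', './artifact/reports', { recursive: true, force: true });
 * Copying a directory without `recursive: true` rejects with an error (see the check below).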
* Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js * * @param source source path * @param dest destination path * @param options optional. See CopyOptions. */ function cp(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { const { force, recursive } = readCopyOptions(options); const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; // Dest is an existing file, but not forcing if (destStat && destStat.isFile() && !force) { return; } // If dest is an existing directory, should copy inside. const newDest = destStat && destStat.isDirectory() ? path.join(dest, path.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } const sourceStat = yield ioUtil.stat(source); if (sourceStat.isDirectory()) { if (!recursive) { throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); } else { yield cpDirRecursive(source, newDest, 0, force); } } else { if (path.relative(source, newDest) === '') { // a file cannot be copied to itself throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); } }); } exports.cp = cp; /** * Moves a path. * * @param source source path * @param dest destination path * @param options optional. See MoveOptions. */ function mv(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { // If dest is directory copy src into dest dest = path.join(dest, path.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { if (options.force == null || options.force) { yield rmRF(dest); } else { throw new Error('Destination already exists'); } } } yield mkdirP(path.dirname(dest)); yield ioUtil.rename(source, dest); }); } exports.mv = mv; /** * Remove a path recursively with force * * @param inputPath path to remove */ function rmRF(inputPath) { return __awaiter(this, void 0, void 0, function* () { if (ioUtil.IS_WINDOWS) { // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. try { if (yield ioUtil.isDirectory(inputPath, true)) { yield exec(`rd /s /q "${inputPath}"`); } else { yield exec(`del /f /a "${inputPath}"`); } } catch (err) { // if you try to delete a file that doesn't exist, desired result is achieved // other errors are valid if (err.code !== 'ENOENT') throw err; } // Shelling out fails to remove a symlink folder with missing source, this unlink catches that try { yield ioUtil.unlink(inputPath); } catch (err) { // if you try to delete a file that doesn't exist, desired result is achieved // other errors are valid if (err.code !== 'ENOENT') throw err; } } else { let isDir = false; try { isDir = yield ioUtil.isDirectory(inputPath); } catch (err) { // if you try to delete a file that doesn't exist, desired result is achieved // other errors are valid if (err.code !== 'ENOENT') throw err; return; } if (isDir) { yield exec(`rm -rf "${inputPath}"`); } else { yield ioUtil.unlink(inputPath); } } }); } exports.rmRF = rmRF; /** * Make a directory. 
Creates the full path with folders in between * Will throw if it fails * * @param fsPath path to create * @returns Promise */ function mkdirP(fsPath) { return __awaiter(this, void 0, void 0, function* () { yield ioUtil.mkdirP(fsPath); }); } exports.mkdirP = mkdirP; /** * Returns path of a tool had the tool actually been invoked. Resolves via paths. * If you check and the tool does not exist, it will throw. * * @param tool name of the tool * @param check whether to check if tool exists * @returns Promise path to tool */ function which(tool, check) { return __awaiter(this, void 0, void 0, function* () { if (!tool) { throw new Error("parameter 'tool' is required"); } // recursive when check=true if (check) { const result = yield which(tool, false); if (!result) { if (ioUtil.IS_WINDOWS) { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); } else { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } } try { // build the list of extensions to try const extensions = []; if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { for (const extension of process.env.PATHEXT.split(path.delimiter)) { if (extension) { extensions.push(extension); } } } // if it's rooted, return it if exists. otherwise return empty. if (ioUtil.isRooted(tool)) { const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); if (filePath) { return filePath; } return ''; } // if any path separators, return empty if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { return ''; } // build the list of directories // // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, // it feels like we should not do this. Checking the current directory seems like more of a use // case of a shell, and the which() function exposed by the toolkit should strive for consistency // across platforms. const directories = []; if (process.env.PATH) { for (const p of process.env.PATH.split(path.delimiter)) { if (p) { directories.push(p); } } } // return the first match for (const directory of directories) { const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); if (filePath) { return filePath; } } return ''; } catch (err) { throw new Error(`which failed with message ${err.message}`); } }); } exports.which = which; function readCopyOptions(options) { const force = options.force == null ? 
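/*
 * Usage sketch for the `cp`, `mv`, `rmRF`, `mkdirP` and `which` helpers defined in
 * this module (judging by the io.js source-map reference, it appears to be the
 * bundled `@actions/io` package). The paths below are illustrative placeholders,
 * not files referenced anywhere in this bundle.
 *
 *   const io = require('@actions/io');
 *
 *   async function prepare() {
 *     await io.mkdirP('build/output');                          // create nested directories
 *     await io.cp('fixtures', 'build/output', { recursive: true });
 *     const nodePath = await io.which('node', true);            // check=true: throws if not found on PATH
 *     console.log(`node resolved to ${nodePath}`);
 *     await io.rmRF('build/tmp');                               // recursive, forced removal (ENOENT ignored)
 *   }
 */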
true : options.force; const recursive = Boolean(options.recursive); return { force, recursive }; } function cpDirRecursive(sourceDir, destDir, currentDepth, force) { return __awaiter(this, void 0, void 0, function* () { // Ensure there is not a run away recursive copy if (currentDepth >= 255) return; currentDepth++; yield mkdirP(destDir); const files = yield ioUtil.readdir(sourceDir); for (const fileName of files) { const srcFile = `${sourceDir}/${fileName}`; const destFile = `${destDir}/${fileName}`; const srcFileStat = yield ioUtil.lstat(srcFile); if (srcFileStat.isDirectory()) { // Recurse yield cpDirRecursive(srcFile, destFile, currentDepth, force); } else { yield copyFile(srcFile, destFile, force); } } // Change the mode for the newly created directory yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); }); } // Buffered file copy function copyFile(srcFile, destFile, force) { return __awaiter(this, void 0, void 0, function* () { if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { // unlink/re-link it try { yield ioUtil.lstat(destFile); yield ioUtil.unlink(destFile); } catch (e) { // Try to override file permission if (e.code === 'EPERM') { yield ioUtil.chmod(destFile, '0666'); yield ioUtil.unlink(destFile); } // other errors = it doesn't exist, no work to do } // Copy over symlink const symlinkFull = yield ioUtil.readlink(srcFile); yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); } else if (!(yield ioUtil.exists(destFile)) || force) { yield ioUtil.copyFile(srcFile, destFile); } }); } //# sourceMappingURL=io.js.map /***/ }), /***/ 3803: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, lstatSync: fs.lstatSync, statSync: fs.statSync, readdir: fs.readdir, readdirSync: fs.readdirSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === undefined) { return exports.FILE_SYSTEM_ADAPTER; } return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); } exports.createFileSystemAdapter = createFileSystemAdapter; /***/ }), /***/ 8838: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); } const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); const SUPPORTED_MAJOR_VERSION = 10; const SUPPORTED_MINOR_VERSION = 10; const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; /** * IS `true` for Node.js 10.10 and greater. 
*/ exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; /***/ }), /***/ 5667: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Settings = exports.scandirSync = exports.scandir = void 0; const async = __nccwpck_require__(4507); const sync = __nccwpck_require__(9560); const settings_1 = __nccwpck_require__(8662); exports.Settings = settings_1.default; function scandir(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { async.read(path, getSettings(), optionsOrSettingsOrCallback); return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } exports.scandir = scandir; function scandirSync(path, optionsOrSettings) { const settings = getSettings(optionsOrSettings); return sync.read(path, settings); } exports.scandirSync = scandirSync; function getSettings(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1.default) { return settingsOrOptions; } return new settings_1.default(settingsOrOptions); } /***/ }), /***/ 4507: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; const fsStat = __nccwpck_require__(109); const rpl = __nccwpck_require__(5288); const constants_1 = __nccwpck_require__(8838); const utils = __nccwpck_require__(6297); const common = __nccwpck_require__(3847); function read(directory, settings, callback) { if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { readdirWithFileTypes(directory, settings, callback); return; } readdir(directory, settings, callback); } exports.read = read; function readdirWithFileTypes(directory, settings, callback) { settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { if (readdirError !== null) { callFailureCallback(callback, readdirError); return; } const entries = dirents.map((dirent) => ({ dirent, name: dirent.name, path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) })); if (!settings.followSymbolicLinks) { callSuccessCallback(callback, entries); return; } const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); rpl(tasks, (rplError, rplEntries) => { if (rplError !== null) { callFailureCallback(callback, rplError); return; } callSuccessCallback(callback, rplEntries); }); }); } exports.readdirWithFileTypes = readdirWithFileTypes; function makeRplTaskEntry(entry, settings) { return (done) => { if (!entry.dirent.isSymbolicLink()) { done(null, entry); return; } settings.fs.stat(entry.path, (statError, stats) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { done(statError); return; } done(null, entry); return; } entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); done(null, entry); }); }; } function readdir(directory, settings, callback) { settings.fs.readdir(directory, (readdirError, names) => { if (readdirError !== null) { callFailureCallback(callback, readdirError); return; } const tasks = names.map((name) => { const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); return (done) => { fsStat.stat(path, settings.fsStatSettings, (error, stats) => { if (error !== null) { done(error); return; } const entry = { name, path, dirent: utils.fs.createDirentFromStats(name, stats) }; if 
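/*
 * Usage sketch for the `scandir`/`scandirSync` API exported above (this looks like
 * the bundled `@nodelib/fs.scandir` package). The directory name is a placeholder.
 *
 *   const fsScandir = require('@nodelib/fs.scandir');
 *
 *   // Callback form; `stats: true` attaches an fs.Stats object to each entry.
 *   fsScandir.scandir('some-directory', { stats: true }, (error, entries) => {
 *     if (error) throw error;
 *     for (const entry of entries) {
 *       console.log(entry.name, entry.dirent.isDirectory(), entry.stats && entry.stats.size);
 *     }
 *   });
 *
 *   // Synchronous form with default settings.
 *   const entries = fsScandir.scandirSync('some-directory');
 */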
(settings.stats) { entry.stats = stats; } done(null, entry); }); }; }); rpl(tasks, (rplError, entries) => { if (rplError !== null) { callFailureCallback(callback, rplError); return; } callSuccessCallback(callback, entries); }); }); } exports.readdir = readdir; function callFailureCallback(callback, error) { callback(error); } function callSuccessCallback(callback, result) { callback(null, result); } /***/ }), /***/ 3847: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.joinPathSegments = void 0; function joinPathSegments(a, b, separator) { /** * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). */ if (a.endsWith(separator)) { return a + b; } return a + separator + b; } exports.joinPathSegments = joinPathSegments; /***/ }), /***/ 9560: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; const fsStat = __nccwpck_require__(109); const constants_1 = __nccwpck_require__(8838); const utils = __nccwpck_require__(6297); const common = __nccwpck_require__(3847); function read(directory, settings) { if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { return readdirWithFileTypes(directory, settings); } return readdir(directory, settings); } exports.read = read; function readdirWithFileTypes(directory, settings) { const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); return dirents.map((dirent) => { const entry = { dirent, name: dirent.name, path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) }; if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { try { const stats = settings.fs.statSync(entry.path); entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); } catch (error) { if (settings.throwErrorOnBrokenSymbolicLink) { throw error; } } } return entry; }); } exports.readdirWithFileTypes = readdirWithFileTypes; function readdir(directory, settings) { const names = settings.fs.readdirSync(directory); return names.map((name) => { const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); const stats = fsStat.statSync(entryPath, settings.fsStatSettings); const entry = { name, path: entryPath, dirent: utils.fs.createDirentFromStats(name, stats) }; if (settings.stats) { entry.stats = stats; } return entry; }); } exports.readdir = readdir; /***/ }), /***/ 8662: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const path = __nccwpck_require__(1017); const fsStat = __nccwpck_require__(109); const fs = __nccwpck_require__(3803); class Settings { constructor(_options = {}) { this._options = _options; this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); this.fs = fs.createFileSystemAdapter(this._options.fs); this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); this.stats = this._getValue(this._options.stats, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); this.fsStatSettings = new fsStat.Settings({ followSymbolicLink: this.followSymbolicLinks, fs: this.fs, throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink }); } _getValue(option, value) { 
return option !== null && option !== void 0 ? option : value; } } exports["default"] = Settings; /***/ }), /***/ 883: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createDirentFromStats = void 0; class DirentFromStats { constructor(name, stats) { this.name = name; this.isBlockDevice = stats.isBlockDevice.bind(stats); this.isCharacterDevice = stats.isCharacterDevice.bind(stats); this.isDirectory = stats.isDirectory.bind(stats); this.isFIFO = stats.isFIFO.bind(stats); this.isFile = stats.isFile.bind(stats); this.isSocket = stats.isSocket.bind(stats); this.isSymbolicLink = stats.isSymbolicLink.bind(stats); } } function createDirentFromStats(name, stats) { return new DirentFromStats(name, stats); } exports.createDirentFromStats = createDirentFromStats; /***/ }), /***/ 6297: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.fs = void 0; const fs = __nccwpck_require__(883); exports.fs = fs; /***/ }), /***/ 2987: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; const fs = __nccwpck_require__(7147); exports.FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, stat: fs.stat, lstatSync: fs.lstatSync, statSync: fs.statSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === undefined) { return exports.FILE_SYSTEM_ADAPTER; } return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); } exports.createFileSystemAdapter = createFileSystemAdapter; /***/ }), /***/ 109: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.statSync = exports.stat = exports.Settings = void 0; const async = __nccwpck_require__(4147); const sync = __nccwpck_require__(4527); const settings_1 = __nccwpck_require__(2410); exports.Settings = settings_1.default; function stat(path, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { async.read(path, getSettings(), optionsOrSettingsOrCallback); return; } async.read(path, getSettings(optionsOrSettingsOrCallback), callback); } exports.stat = stat; function statSync(path, optionsOrSettings) { const settings = getSettings(optionsOrSettings); return sync.read(path, settings); } exports.statSync = statSync; function getSettings(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1.default) { return settingsOrOptions; } return new settings_1.default(settingsOrOptions); } /***/ }), /***/ 4147: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.read = void 0; function read(path, settings, callback) { settings.fs.lstat(path, (lstatError, lstat) => { if (lstatError !== null) { callFailureCallback(callback, lstatError); return; } if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { callSuccessCallback(callback, lstat); return; } settings.fs.stat(path, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { callFailureCallback(callback, statError); return; } callSuccessCallback(callback, lstat); return; } if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } callSuccessCallback(callback, stat); }); }); } 
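/*
 * Usage sketch for the `stat`/`statSync` API whose async reader is defined above
 * (this looks like the bundled `@nodelib/fs.stat` package). Options mirror the
 * Settings class in this bundle: followSymbolicLink, markSymbolicLink,
 * throwErrorOnBrokenSymbolicLink, fs. The path is a placeholder.
 *
 *   const fsStat = require('@nodelib/fs.stat');
 *
 *   // Callback form: follows symlinks by default; falls back to lstat on a broken
 *   // link only when throwErrorOnBrokenSymbolicLink is false.
 *   fsStat.stat('some/path', { followSymbolicLink: true }, (error, stats) => {
 *     if (error) throw error;
 *     console.log(stats.isDirectory());
 *   });
 *
 *   // Synchronous form; markSymbolicLink makes stats.isSymbolicLink() return true for links.
 *   const stats = fsStat.statSync('some/path', { markSymbolicLink: true });
 */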
exports.read = read; function callFailureCallback(callback, error) { callback(error); } function callSuccessCallback(callback, result) { callback(null, result); } /***/ }), /***/ 4527: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.read = void 0; function read(path, settings) { const lstat = settings.fs.lstatSync(path); if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { return lstat; } try { const stat = settings.fs.statSync(path); if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } return stat; } catch (error) { if (!settings.throwErrorOnBrokenSymbolicLink) { return lstat; } throw error; } } exports.read = read; /***/ }), /***/ 2410: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const fs = __nccwpck_require__(2987); class Settings { constructor(_options = {}) { this._options = _options; this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); this.fs = fs.createFileSystemAdapter(this._options.fs); this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); } _getValue(option, value) { return option !== null && option !== void 0 ? option : value; } } exports["default"] = Settings; /***/ }), /***/ 6026: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; const async_1 = __nccwpck_require__(7523); const stream_1 = __nccwpck_require__(6737); const sync_1 = __nccwpck_require__(3068); const settings_1 = __nccwpck_require__(141); exports.Settings = settings_1.default; function walk(directory, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === 'function') { new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); return; } new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); } exports.walk = walk; function walkSync(directory, optionsOrSettings) { const settings = getSettings(optionsOrSettings); const provider = new sync_1.default(directory, settings); return provider.read(); } exports.walkSync = walkSync; function walkStream(directory, optionsOrSettings) { const settings = getSettings(optionsOrSettings); const provider = new stream_1.default(directory, settings); return provider.read(); } exports.walkStream = walkStream; function getSettings(settingsOrOptions = {}) { if (settingsOrOptions instanceof settings_1.default) { return settingsOrOptions; } return new settings_1.default(settingsOrOptions); } /***/ }), /***/ 7523: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const async_1 = __nccwpck_require__(5732); class AsyncProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new async_1.default(this._root, this._settings); this._storage = []; } read(callback) { this._reader.onError((error) => { callFailureCallback(callback, error); }); this._reader.onEntry((entry) => { this._storage.push(entry); }); this._reader.onEnd(() => { callSuccessCallback(callback, this._storage); }); this._reader.read(); } } exports["default"] = 
AsyncProvider; function callFailureCallback(callback, error) { callback(error); } function callSuccessCallback(callback, entries) { callback(null, entries); } /***/ }), /***/ 6737: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const stream_1 = __nccwpck_require__(2781); const async_1 = __nccwpck_require__(5732); class StreamProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new async_1.default(this._root, this._settings); this._stream = new stream_1.Readable({ objectMode: true, read: () => { }, destroy: () => { if (!this._reader.isDestroyed) { this._reader.destroy(); } } }); } read() { this._reader.onError((error) => { this._stream.emit('error', error); }); this._reader.onEntry((entry) => { this._stream.push(entry); }); this._reader.onEnd(() => { this._stream.push(null); }); this._reader.read(); return this._stream; } } exports["default"] = StreamProvider; /***/ }), /***/ 3068: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const sync_1 = __nccwpck_require__(3595); class SyncProvider { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._reader = new sync_1.default(this._root, this._settings); } read() { return this._reader.read(); } } exports["default"] = SyncProvider; /***/ }), /***/ 5732: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const events_1 = __nccwpck_require__(2361); const fsScandir = __nccwpck_require__(5667); const fastq = __nccwpck_require__(7340); const common = __nccwpck_require__(7988); const reader_1 = __nccwpck_require__(8311); class AsyncReader extends reader_1.default { constructor(_root, _settings) { super(_root, _settings); this._settings = _settings; this._scandir = fsScandir.scandir; this._emitter = new events_1.EventEmitter(); this._queue = fastq(this._worker.bind(this), this._settings.concurrency); this._isFatalError = false; this._isDestroyed = false; this._queue.drain = () => { if (!this._isFatalError) { this._emitter.emit('end'); } }; } read() { this._isFatalError = false; this._isDestroyed = false; setImmediate(() => { this._pushToQueue(this._root, this._settings.basePath); }); return this._emitter; } get isDestroyed() { return this._isDestroyed; } destroy() { if (this._isDestroyed) { throw new Error('The reader is already destroyed'); } this._isDestroyed = true; this._queue.killAndDrain(); } onEntry(callback) { this._emitter.on('entry', callback); } onError(callback) { this._emitter.once('error', callback); } onEnd(callback) { this._emitter.once('end', callback); } _pushToQueue(directory, base) { const queueItem = { directory, base }; this._queue.push(queueItem, (error) => { if (error !== null) { this._handleError(error); } }); } _worker(item, done) { this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { if (error !== null) { done(error, undefined); return; } for (const entry of entries) { this._handleEntry(entry, item.base); } done(null, undefined); }); } _handleError(error) { if (this._isDestroyed || !common.isFatalError(this._settings, error)) { return; } this._isFatalError = true; this._isDestroyed = true; this._emitter.emit('error', error); } _handleEntry(entry, base) { if (this._isDestroyed || this._isFatalError) { return; } const 
fullpath = entry.path; if (base !== undefined) { entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); } if (common.isAppliedFilter(this._settings.entryFilter, entry)) { this._emitEntry(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); } } _emitEntry(entry) { this._emitter.emit('entry', entry); } } exports["default"] = AsyncReader; /***/ }), /***/ 7988: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; function isFatalError(settings, error) { if (settings.errorFilter === null) { return true; } return !settings.errorFilter(error); } exports.isFatalError = isFatalError; function isAppliedFilter(filter, value) { return filter === null || filter(value); } exports.isAppliedFilter = isAppliedFilter; function replacePathSegmentSeparator(filepath, separator) { return filepath.split(/[/\\]/).join(separator); } exports.replacePathSegmentSeparator = replacePathSegmentSeparator; function joinPathSegments(a, b, separator) { if (a === '') { return b; } /** * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). */ if (a.endsWith(separator)) { return a + b; } return a + separator + b; } exports.joinPathSegments = joinPathSegments; /***/ }), /***/ 8311: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const common = __nccwpck_require__(7988); class Reader { constructor(_root, _settings) { this._root = _root; this._settings = _settings; this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); } } exports["default"] = Reader; /***/ }), /***/ 3595: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const fsScandir = __nccwpck_require__(5667); const common = __nccwpck_require__(7988); const reader_1 = __nccwpck_require__(8311); class SyncReader extends reader_1.default { constructor() { super(...arguments); this._scandir = fsScandir.scandirSync; this._storage = []; this._queue = new Set(); } read() { this._pushToQueue(this._root, this._settings.basePath); this._handleQueue(); return this._storage; } _pushToQueue(directory, base) { this._queue.add({ directory, base }); } _handleQueue() { for (const item of this._queue.values()) { this._handleDirectory(item.directory, item.base); } } _handleDirectory(directory, base) { try { const entries = this._scandir(directory, this._settings.fsScandirSettings); for (const entry of entries) { this._handleEntry(entry, base); } } catch (error) { this._handleError(error); } } _handleError(error) { if (!common.isFatalError(this._settings, error)) { return; } throw error; } _handleEntry(entry, base) { const fullpath = entry.path; if (base !== undefined) { entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); } if (common.isAppliedFilter(this._settings.entryFilter, entry)) { this._pushToStorage(entry); } if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { this._pushToQueue(fullpath, base === undefined ? 
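/*
 * Usage sketch for the directory walker built from the Async/Stream/Sync readers
 * in this part of the bundle (it looks like the `@nodelib/fs.walk` package).
 * Directory names and the filter are placeholders.
 *
 *   const fsWalk = require('@nodelib/fs.walk');
 *
 *   // Callback form with an entryFilter; a deepFilter would limit which directories are descended into.
 *   fsWalk.walk('src', { entryFilter: (entry) => entry.name.endsWith('.js') }, (error, entries) => {
 *     if (error) throw error;
 *     console.log(entries.map((entry) => entry.path));
 *   });
 *
 *   // Stream form: entries are emitted as 'data' events.
 *   fsWalk.walkStream('src').on('data', (entry) => console.log(entry.path));
 *
 *   // Synchronous form.
 *   const allEntries = fsWalk.walkSync('src');
 */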
undefined : entry.path); } } _pushToStorage(entry) { this._storage.push(entry); } } exports["default"] = SyncReader; /***/ }), /***/ 141: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const path = __nccwpck_require__(1017); const fsScandir = __nccwpck_require__(5667); class Settings { constructor(_options = {}) { this._options = _options; this.basePath = this._getValue(this._options.basePath, undefined); this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); this.fsScandirSettings = new fsScandir.Settings({ followSymbolicLinks: this._options.followSymbolicLinks, fs: this._options.fs, pathSegmentSeparator: this._options.pathSegmentSeparator, stats: this._options.stats, throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink }); } _getValue(option, value) { return option !== null && option !== void 0 ? option : value; } } exports["default"] = Settings; /***/ }), /***/ 334: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { createTokenAuth: () => createTokenAuth }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/auth.js var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; var REGEX_IS_INSTALLATION = /^ghs_/; var REGEX_IS_USER_TO_SERVER = /^ghu_/; async function auth(token) { const isApp = token.split(/\./).length === 3; const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
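/*
 * Usage sketch for `createTokenAuth` from the surrounding module (this looks like
 * the bundled `@octokit/auth-token` package). The token value is a placeholder; per
 * the regexes above, `ghs_` maps to "installation", `ghu_` to "user-to-server", a
 * three-part JWT to "app", and anything else to "oauth".
 *
 *   const { createTokenAuth } = require('@octokit/auth-token');
 *
 *   async function authenticate() {
 *     const auth = createTokenAuth('ghp_PersonalAccessTokenPlaceholder');
 *     const authentication = await auth();
 *     // => { type: 'token', token: 'ghp_…', tokenType: 'oauth' }
 *     return authentication;
 *   }
 */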
"user-to-server" : "oauth"; return { type: "token", token, tokenType }; } // pkg/dist-src/with-authorization-prefix.js function withAuthorizationPrefix(token) { if (token.split(/\./).length === 3) { return `bearer ${token}`; } return `token ${token}`; } // pkg/dist-src/hook.js async function hook(token, request, route, parameters) { const endpoint = request.endpoint.merge( route, parameters ); endpoint.headers.authorization = withAuthorizationPrefix(token); return request(endpoint); } // pkg/dist-src/index.js var createTokenAuth = function createTokenAuth2(token) { if (!token) { throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } if (typeof token !== "string") { throw new Error( "[@octokit/auth-token] Token passed to createTokenAuth is not a string" ); } token = token.replace(/^(token|bearer) +/i, ""); return Object.assign(auth.bind(null, token), { hook: hook.bind(null, token) }); }; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 6762: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { Octokit: () => Octokit }); module.exports = __toCommonJS(dist_src_exports); var import_universal_user_agent = __nccwpck_require__(5030); var import_before_after_hook = __nccwpck_require__(3682); var import_request = __nccwpck_require__(6234); var import_graphql = __nccwpck_require__(8467); var import_auth_token = __nccwpck_require__(334); // pkg/dist-src/version.js var VERSION = "5.0.2"; // pkg/dist-src/index.js var noop = () => { }; var consoleWarn = console.warn.bind(console); var consoleError = console.error.bind(console); var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; var Octokit = class { static { this.VERSION = VERSION; } static defaults(defaults) { const OctokitWithDefaults = class extends this { constructor(...args) { const options = args[0] || {}; if (typeof defaults === "function") { super(defaults(options)); return; } super( Object.assign( {}, defaults, options, options.userAgent && defaults.userAgent ? { userAgent: `${options.userAgent} ${defaults.userAgent}` } : null ) ); } }; return OctokitWithDefaults; } static { this.plugins = []; } /** * Attach a plugin (or many) to your Octokit instance. * * @example * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
*/ static plugin(...newPlugins) { const currentPlugins = this.plugins; const NewOctokit = class extends this { static { this.plugins = currentPlugins.concat( newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) ); } }; return NewOctokit; } constructor(options = {}) { const hook = new import_before_after_hook.Collection(); const requestDefaults = { baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { // @ts-ignore internal usage only, no need to type hook: hook.bind(null, "request") }), mediaType: { previews: [], format: "" } }; requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; if (options.baseUrl) { requestDefaults.baseUrl = options.baseUrl; } if (options.previews) { requestDefaults.mediaType.previews = options.previews; } if (options.timeZone) { requestDefaults.headers["time-zone"] = options.timeZone; } this.request = import_request.request.defaults(requestDefaults); this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); this.log = Object.assign( { debug: noop, info: noop, warn: consoleWarn, error: consoleError }, options.log ); this.hook = hook; if (!options.authStrategy) { if (!options.auth) { this.auth = async () => ({ type: "unauthenticated" }); } else { const auth = (0, import_auth_token.createTokenAuth)(options.auth); hook.wrap("request", auth.hook); this.auth = auth; } } else { const { authStrategy, ...otherOptions } = options; const auth = authStrategy( Object.assign( { request: this.request, log: this.log, // we pass the current octokit instance as well as its constructor options // to allow for authentication strategies that return a new octokit instance // that shares the same internal state as the current one. The original // requirement for this was the "event-octokit" authentication strategy // of https://github.com/probot/octokit-auth-probot. 
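/*
 * Usage sketch for the Octokit class being constructed here (this looks like the
 * bundled `@octokit/core` package, VERSION "5.0.2" above). The owner/repo values
 * and environment variable are placeholders.
 *
 *   const { Octokit } = require('@octokit/core');
 *
 *   async function listOpenIssues() {
 *     const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
 *     const { data } = await octokit.request('GET /repos/{owner}/{repo}/issues', {
 *       owner: 'octocat',
 *       repo: 'hello-world',
 *       state: 'open'
 *     });
 *     return data;
 *   }
 */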
octokit: this, octokitOptions: otherOptions }, options.auth ) ); hook.wrap("request", auth.hook); this.auth = auth; } const classConstructor = this.constructor; for (let i = 0; i < classConstructor.plugins.length; ++i) { Object.assign(this, classConstructor.plugins[i](this, options)); } } }; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 9440: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { endpoint: () => endpoint }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/defaults.js var import_universal_user_agent = __nccwpck_require__(5030); // pkg/dist-src/version.js var VERSION = "9.0.4"; // pkg/dist-src/defaults.js var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; var DEFAULTS = { method: "GET", baseUrl: "https://api.github.com", headers: { accept: "application/vnd.github.v3+json", "user-agent": userAgent }, mediaType: { format: "" } }; // pkg/dist-src/util/lowercase-keys.js function lowercaseKeys(object) { if (!object) { return {}; } return Object.keys(object).reduce((newObj, key) => { newObj[key.toLowerCase()] = object[key]; return newObj; }, {}); } // pkg/dist-src/util/is-plain-object.js function isPlainObject(value) { if (typeof value !== "object" || value === null) return false; if (Object.prototype.toString.call(value) !== "[object Object]") return false; const proto = Object.getPrototypeOf(value); if (proto === null) return true; const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } // pkg/dist-src/util/merge-deep.js function mergeDeep(defaults, options) { const result = Object.assign({}, defaults); Object.keys(options).forEach((key) => { if (isPlainObject(options[key])) { if (!(key in defaults)) Object.assign(result, { [key]: options[key] }); else result[key] = mergeDeep(defaults[key], options[key]); } else { Object.assign(result, { [key]: options[key] }); } }); return result; } // pkg/dist-src/util/remove-undefined-properties.js function removeUndefinedProperties(obj) { for (const key in obj) { if (obj[key] === void 0) { delete obj[key]; } } return obj; } // pkg/dist-src/merge.js function merge(defaults, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); options = Object.assign(url ? 
{ method, url } : { url: method }, options); } else { options = Object.assign({}, route); } options.headers = lowercaseKeys(options.headers); removeUndefinedProperties(options); removeUndefinedProperties(options.headers); const mergedOptions = mergeDeep(defaults || {}, options); if (options.url === "/graphql") { if (defaults && defaults.mediaType.previews?.length) { mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( (preview) => !mergedOptions.mediaType.previews.includes(preview) ).concat(mergedOptions.mediaType.previews); } mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } return mergedOptions; } // pkg/dist-src/util/add-query-parameters.js function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? "&" : "?"; const names = Object.keys(parameters); if (names.length === 0) { return url; } return url + separator + names.map((name) => { if (name === "q") { return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } return `${name}=${encodeURIComponent(parameters[name])}`; }).join("&"); } // pkg/dist-src/util/extract-url-variable-names.js var urlVariableRegex = /\{[^}]+\}/g; function removeNonChars(variableName) { return variableName.replace(/^\W+|\W+$/g, "").split(/,/); } function extractUrlVariableNames(url) { const matches = url.match(urlVariableRegex); if (!matches) { return []; } return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } // pkg/dist-src/util/omit.js function omit(object, keysToOmit) { const result = { __proto__: null }; for (const key of Object.keys(object)) { if (keysToOmit.indexOf(key) === -1) { result[key] = object[key]; } } return result; } // pkg/dist-src/util/url-template.js function encodeReserved(str) { return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } return part; }).join(""); } function encodeUnreserved(str) { return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } function encodeValue(operator, value, key) { value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); if (key) { return encodeUnreserved(key) + "=" + value; } else { return value; } } function isDefined(value) { return value !== void 0 && value !== null; } function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } function getValues(context, operator, key, modifier) { var value = context[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); if (modifier && modifier !== "*") { value = value.substring(0, parseInt(modifier, 10)); } result.push( encodeValue(operator, value, isKeyOperator(operator) ? key : "") ); } else { if (modifier === "*") { if (Array.isArray(value)) { value.filter(isDefined).forEach(function(value2) { result.push( encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") ); }); } else { Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } }); } } else { const tmp = []; if (Array.isArray(value)) { value.filter(isDefined).forEach(function(value2) { tmp.push(encodeValue(operator, value2)); }); } else { Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { tmp.push(encodeUnreserved(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } if (isKeyOperator(operator)) { result.push(encodeUnreserved(key) + "=" + tmp.join(",")); } else if (tmp.length !== 0) { result.push(tmp.join(",")); } } } } else { if (operator === ";") { if (isDefined(value)) { result.push(encodeUnreserved(key)); } } else if (value === "" && (operator === "&" || operator === "?")) { result.push(encodeUnreserved(key) + "="); } else if (value === "") { result.push(""); } } return result; } function parseUrl(template) { return { expand: expand.bind(null, template) }; } function expand(template, context) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, function(_, expression, literal) { if (expression) { let operator = ""; const values = []; if (operators.indexOf(expression.charAt(0)) !== -1) { operator = expression.charAt(0); expression = expression.substr(1); } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; if (operator === "?") { separator = "&"; } else if (operator !== "#") { separator = operator; } return (values.length !== 0 ? operator : "") + values.join(separator); } else { return values.join(","); } } else { return encodeReserved(literal); } } ); if (template === "/") { return template; } else { return template.replace(/\/$/, ""); } } // pkg/dist-src/parse.js function parse(options) { let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); let body; let parameters = omit(options, [ "method", "baseUrl", "url", "headers", "request", "mediaType" ]); const urlVariableNames = extractUrlVariableNames(url); url = parseUrl(url).expand(parameters); if (!/^http/.test(url)) { url = options.baseUrl + url; } const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); const remainingParameters = omit(parameters, omittedParameters); const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); if (!isBinaryRequest) { if (options.mediaType.format) { headers.accept = headers.accept.split(/,/).map( (format) => format.replace( /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}` ) ).join(","); } if (url.endsWith("/graphql")) { if (options.mediaType.previews?.length) { const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; return `application/vnd.github.${preview}-preview${format}`; }).join(","); } } } if (["GET", "HEAD"].includes(method)) { url = addQueryParameters(url, remainingParameters); } else { if ("data" in remainingParameters) { body = remainingParameters.data; } else { if (Object.keys(remainingParameters).length) { body = remainingParameters; } } } if (!headers["content-type"] && typeof body !== "undefined") { headers["content-type"] = "application/json; charset=utf-8"; } if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { body = ""; } return Object.assign( { method, url, headers }, typeof body !== "undefined" ? { body } : null, options.request ? { request: options.request } : null ); } // pkg/dist-src/endpoint-with-defaults.js function endpointWithDefaults(defaults, route, options) { return parse(merge(defaults, route, options)); } // pkg/dist-src/with-defaults.js function withDefaults(oldDefaults, newDefaults) { const DEFAULTS2 = merge(oldDefaults, newDefaults); const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); return Object.assign(endpoint2, { DEFAULTS: DEFAULTS2, defaults: withDefaults.bind(null, DEFAULTS2), merge: merge.bind(null, DEFAULTS2), parse }); } // pkg/dist-src/index.js var endpoint = withDefaults(null, DEFAULTS); // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 8467: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { GraphqlResponseError: () => GraphqlResponseError, graphql: () => graphql2, withCustomRequest: () => withCustomRequest }); module.exports = __toCommonJS(dist_src_exports); var import_request3 = __nccwpck_require__(6234); var import_universal_user_agent = __nccwpck_require__(5030); // pkg/dist-src/version.js var VERSION = "7.0.2"; // pkg/dist-src/with-defaults.js var import_request2 = __nccwpck_require__(6234); // pkg/dist-src/graphql.js var import_request = __nccwpck_require__(6234); // pkg/dist-src/error.js function _buildMessageForResponseErrors(data) { return `Request failed due to following response errors: ` + data.errors.map((e) => ` - ${e.message}`).join("\n"); } var GraphqlResponseError = class extends Error { constructor(request2, headers, response) { super(_buildMessageForResponseErrors(response)); this.request = request2; this.headers = headers; this.response = response; this.name = "GraphqlResponseError"; this.errors = response.errors; this.data = response.data; if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } }; // pkg/dist-src/graphql.js var NON_VARIABLE_OPTIONS = [ "method", "baseUrl", "url", "headers", "request", "query", "mediaType" ]; var 
FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; function graphql(request2, query, options) { if (options) { if (typeof query === "string" && "query" in options) { return Promise.reject( new Error(`[@octokit/graphql] "query" cannot be used as variable name`) ); } for (const key in options) { if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; return Promise.reject( new Error( `[@octokit/graphql] "${key}" cannot be used as variable name` ) ); } } const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; const requestOptions = Object.keys( parsedOptions ).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } if (!result.variables) { result.variables = {}; } result.variables[key] = parsedOptions[key]; return result; }, {}); const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } return request2(requestOptions).then((response) => { if (response.data.errors) { const headers = {}; for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; } throw new GraphqlResponseError( requestOptions, headers, response.data ); } return response.data.data; }); } // pkg/dist-src/with-defaults.js function withDefaults(request2, newDefaults) { const newRequest = request2.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), endpoint: newRequest.endpoint }); } // pkg/dist-src/index.js var graphql2 = withDefaults(import_request3.request, { headers: { "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` }, method: "POST", url: "/graphql" }); function withCustomRequest(customRequest) { return withDefaults(customRequest, { method: "POST", url: "/graphql" }); } // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 4193: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { composePaginateRest: () => composePaginateRest, isPaginatingEndpoint: () => isPaginatingEndpoint, paginateRest: () => paginateRest, paginatingEndpoints: () => paginatingEndpoints }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/version.js var VERSION = "9.1.5"; // pkg/dist-src/normalize-paginated-list-response.js function normalizePaginatedListResponse(response) { if (!response.data) { return { ...response, data: [] }; } const responseNeedsNormalization = "total_count" in response.data && !("url" in 
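/*
 * Usage sketch for the graphql client defined above (this looks like the bundled
 * `@octokit/graphql` package, VERSION "7.0.2"). Non-reserved keys become GraphQL
 * variables; `headers` is passed through as request headers. The token and
 * repository names are placeholders.
 *
 *   const { graphql } = require('@octokit/graphql');
 *
 *   async function starCount() {
 *     const { repository } = await graphql(
 *       `query ($owner: String!, $repo: String!) {
 *          repository(owner: $owner, name: $repo) { stargazerCount }
 *        }`,
 *       {
 *         owner: 'octocat',
 *         repo: 'hello-world',
 *         headers: { authorization: 'token ghp_PersonalAccessTokenPlaceholder' }
 *       }
 *     );
 *     return repository.stargazerCount;
 *   }
 */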
response.data); if (!responseNeedsNormalization) return response; const incompleteResults = response.data.incomplete_results; const repositorySelection = response.data.repository_selection; const totalCount = response.data.total_count; delete response.data.incomplete_results; delete response.data.repository_selection; delete response.data.total_count; const namespaceKey = Object.keys(response.data)[0]; const data = response.data[namespaceKey]; response.data = data; if (typeof incompleteResults !== "undefined") { response.data.incomplete_results = incompleteResults; } if (typeof repositorySelection !== "undefined") { response.data.repository_selection = repositorySelection; } response.data.total_count = totalCount; return response; } // pkg/dist-src/iterator.js function iterator(octokit, route, parameters) { const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); const requestMethod = typeof route === "function" ? route : octokit.request; const method = options.method; const headers = options.headers; let url = options.url; return { [Symbol.asyncIterator]: () => ({ async next() { if (!url) return { done: true }; try { const response = await requestMethod({ method, url, headers }); const normalizedResponse = normalizePaginatedListResponse(response); url = ((normalizedResponse.headers.link || "").match( /<([^>]+)>;\s*rel="next"/ ) || [])[1]; return { value: normalizedResponse }; } catch (error) { if (error.status !== 409) throw error; url = ""; return { value: { status: 200, headers: {}, data: [] } }; } } }) }; } // pkg/dist-src/paginate.js function paginate(octokit, route, parameters, mapFn) { if (typeof parameters === "function") { mapFn = parameters; parameters = void 0; } return gather( octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn ); } function gather(octokit, results, iterator2, mapFn) { return iterator2.next().then((result) => { if (result.done) { return results; } let earlyExit = false; function done() { earlyExit = true; } results = results.concat( mapFn ? 
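/*
 * Usage sketch for the page iterator defined above: it follows the rel="next" link
 * header and yields one normalized response per page (this looks like the bundled
 * `@octokit/plugin-paginate-rest` package, VERSION "9.1.5"). It assumes an
 * `octokit` instance with the plugin installed; owner/repo are placeholders.
 *
 *   async function logPages(octokit) {
 *     for await (const response of octokit.paginate.iterator(
 *       'GET /repos/{owner}/{repo}/issues',
 *       { owner: 'octocat', repo: 'hello-world', per_page: 100 }
 *     )) {
 *       console.log(`${response.data.length} issues in this page`);
 *     }
 *   }
 */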
mapFn(result.value, done) : result.value.data ); if (earlyExit) { return results; } return gather(octokit, results, iterator2, mapFn); }); } // pkg/dist-src/compose-paginate.js var composePaginateRest = Object.assign(paginate, { iterator }); // pkg/dist-src/generated/paginating-endpoints.js var paginatingEndpoints = [ "GET /advisories", "GET /app/hook/deliveries", "GET /app/installation-requests", "GET /app/installations", "GET /assignments/{assignment_id}/accepted_assignments", "GET /classrooms", "GET /classrooms/{classroom_id}/assignments", "GET /enterprises/{enterprise}/dependabot/alerts", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/actions/variables", "GET /orgs/{org}/actions/variables/{name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/codespaces/secrets", "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", "GET /orgs/{org}/copilot/billing/seats", "GET /orgs/{org}/dependabot/alerts", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/members/{username}/codespaces", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/personal-access-token-requests", "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", "GET /orgs/{org}/personal-access-tokens", "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", "GET /orgs/{org}/projects", "GET /orgs/{org}/properties/values", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/rulesets", "GET /orgs/{org}/rulesets/rule-suites", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/security-advisories", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET 
/projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/organization-secrets", "GET /repos/{owner}/{repo}/actions/organization-variables", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/variables", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/activity", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/alerts", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/rules/branches/{branch}", "GET /repos/{owner}/{repo}/rulesets", "GET /repos/{owner}/{repo}/rulesets/rule-suites", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/security-advisories", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /repositories/{repository_id}/environments/{environment_name}/variables", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/social_accounts", "GET /user/ssh_signing_keys", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/social_accounts", "GET /users/{username}/ssh_signing_keys", "GET /users/{username}/starred", "GET /users/{username}/subscriptions" ]; // pkg/dist-src/paginating-endpoints.js function isPaginatingEndpoint(arg) { if (typeof arg === "string") { return paginatingEndpoints.includes(arg); } else { return false; } } // pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { iterator: iterator.bind(null, octokit) }) }; } paginateRest.VERSION = VERSION; // 
Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 3044: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { legacyRestEndpointMethods: () => legacyRestEndpointMethods, restEndpointMethods: () => restEndpointMethods }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/version.js var VERSION = "10.2.0"; // pkg/dist-src/generated/endpoints.js var Endpoints = { actions: { addCustomLabelsToSelfHostedRunnerForOrg: [ "POST /orgs/{org}/actions/runners/{runner_id}/labels" ], addCustomLabelsToSelfHostedRunnerForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ], addSelectedRepoToOrgVariable: [ "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" ], approveWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" ], cancelWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ], createEnvironmentVariable: [ "POST /repositories/{repository_id}/environments/{environment_name}/variables" ], createOrUpdateEnvironmentSecret: [ "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" ], createOrgVariable: ["POST /orgs/{org}/actions/variables"], createRegistrationTokenForOrg: [ "POST /orgs/{org}/actions/runners/registration-token" ], createRegistrationTokenForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], createRemoveTokenForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ], createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], createWorkflowDispatch: [ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" ], deleteActionsCacheById: [ "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" ], deleteActionsCacheByKey: [ "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" ], deleteArtifact: [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ], deleteEnvironmentSecret: [ "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], deleteEnvironmentVariable: [ "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], deleteRepoSecret: [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" ], 
deleteRepoVariable: [ "DELETE /repos/{owner}/{repo}/actions/variables/{name}" ], deleteSelfHostedRunnerFromOrg: [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ], deleteSelfHostedRunnerFromRepo: [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], deleteWorkflowRunLogs: [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ], disableSelectedRepositoryGithubActionsOrganization: [ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" ], disableWorkflow: [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" ], downloadArtifact: [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ], downloadJobLogsForWorkflowRun: [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ], downloadWorkflowRunAttemptLogs: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" ], downloadWorkflowRunLogs: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ], enableSelectedRepositoryGithubActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" ], enableWorkflow: [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" ], forceCancelWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" ], generateRunnerJitconfigForOrg: [ "POST /orgs/{org}/actions/runners/generate-jitconfig" ], generateRunnerJitconfigForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" ], getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], getActionsCacheUsageByRepoForOrg: [ "GET /orgs/{org}/actions/cache/usage-by-repository" ], getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], getAllowedActionsOrganization: [ "GET /orgs/{org}/actions/permissions/selected-actions" ], getAllowedActionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], getEnvironmentPublicKey: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" ], getEnvironmentSecret: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], getEnvironmentVariable: [ "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], getGithubActionsDefaultWorkflowPermissionsOrganization: [ "GET /orgs/{org}/actions/permissions/workflow" ], getGithubActionsDefaultWorkflowPermissionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/workflow" ], getGithubActionsPermissionsOrganization: [ "GET /orgs/{org}/actions/permissions" ], getGithubActionsPermissionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions" ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], getPendingDeploymentsForRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" ], getRepoPermissions: [ "GET /repos/{owner}/{repo}/actions/permissions", {}, { renamed: ["actions", "getGithubActionsPermissionsRepository"] } ], getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], getRepoVariable: ["GET 
/repos/{owner}/{repo}/actions/variables/{name}"], getReviewsForRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], getSelfHostedRunnerForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], getWorkflowAccessToRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/access" ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], getWorkflowRunAttempt: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" ], getWorkflowRunUsage: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ], getWorkflowUsage: [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], listEnvironmentSecrets: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets" ], listEnvironmentVariables: [ "GET /repositories/{repository_id}/environments/{environment_name}/variables" ], listJobsForWorkflowRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ], listJobsForWorkflowRunAttempt: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" ], listLabelsForSelfHostedRunnerForOrg: [ "GET /orgs/{org}/actions/runners/{runner_id}/labels" ], listLabelsForSelfHostedRunnerForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], listOrgVariables: ["GET /orgs/{org}/actions/variables"], listRepoOrganizationSecrets: [ "GET /repos/{owner}/{repo}/actions/organization-secrets" ], listRepoOrganizationVariables: [ "GET /repos/{owner}/{repo}/actions/organization-variables" ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], listRunnerApplicationsForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ], listSelectedReposForOrgVariable: [ "GET /orgs/{org}/actions/variables/{name}/repositories" ], listSelectedRepositoriesEnabledGithubActionsOrganization: [ "GET /orgs/{org}/actions/permissions/repositories" ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], listWorkflowRunArtifacts: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ], listWorkflowRuns: [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], reRunJobForWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], reRunWorkflowFailedJobs: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" ], removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" ], removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], removeCustomLabelFromSelfHostedRunnerForOrg: [ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" ], removeCustomLabelFromSelfHostedRunnerForRepo: [ "DELETE 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ], removeSelectedRepoFromOrgVariable: [ "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" ], reviewCustomGatesForRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" ], reviewPendingDeploymentsForRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" ], setAllowedActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/selected-actions" ], setAllowedActionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" ], setCustomLabelsForSelfHostedRunnerForOrg: [ "PUT /orgs/{org}/actions/runners/{runner_id}/labels" ], setCustomLabelsForSelfHostedRunnerForRepo: [ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], setGithubActionsDefaultWorkflowPermissionsOrganization: [ "PUT /orgs/{org}/actions/permissions/workflow" ], setGithubActionsDefaultWorkflowPermissionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/workflow" ], setGithubActionsPermissionsOrganization: [ "PUT /orgs/{org}/actions/permissions" ], setGithubActionsPermissionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ], setSelectedReposForOrgVariable: [ "PUT /orgs/{org}/actions/variables/{name}/repositories" ], setSelectedRepositoriesEnabledGithubActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/repositories" ], setWorkflowAccessToRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/access" ], updateEnvironmentVariable: [ "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], updateRepoVariable: [ "PATCH /repos/{owner}/{repo}/actions/variables/{name}" ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], deleteThreadSubscription: [ "DELETE /notifications/threads/{thread_id}/subscription" ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], getThreadSubscriptionForAuthenticatedUser: [ "GET /notifications/threads/{thread_id}/subscription" ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], listOrgEventsForAuthenticatedUser: [ "GET /users/{username}/events/orgs/{org}" ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], listReceivedEventsForUser: ["GET /users/{username}/received_events"], listReceivedPublicEventsForUser: [ "GET /users/{username}/received_events/public" ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], listRepoNotificationsForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/notifications" ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], listReposWatchedByUser: ["GET /users/{username}/subscriptions"], listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], listWatchersForRepo: ["GET 
/repos/{owner}/{repo}/subscribers"], markNotificationsAsRead: ["PUT /notifications"], markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], setThreadSubscription: [ "PUT /notifications/threads/{thread_id}/subscription" ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { addRepoToInstallation: [ "PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } ], addRepoToInstallationForAuthenticatedUser: [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" ], checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], createInstallationAccessToken: [ "POST /app/installations/{installation_id}/access_tokens" ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], getAuthenticated: ["GET /app"], getBySlug: ["GET /apps/{app_slug}"], getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], getSubscriptionPlanForAccount: [ "GET /marketplace_listing/accounts/{account_id}" ], getSubscriptionPlanForAccountStubbed: [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ], getUserInstallation: ["GET /users/{username}/installation"], getWebhookConfigForApp: ["GET /app/hook/config"], getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], listAccountsForPlanStubbed: [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ], listInstallationReposForAuthenticatedUser: [ "GET /user/installations/{installation_id}/repositories" ], listInstallationRequestsForAuthenticatedApp: [ "GET /app/installation-requests" ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], listSubscriptionsForAuthenticatedUserStubbed: [ "GET /user/marketplace_purchases/stubbed" ], listWebhookDeliveries: ["GET /app/hook/deliveries"], redeliverWebhookDelivery: [ "POST /app/hook/deliveries/{delivery_id}/attempts" ], removeRepoFromInstallation: [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } ], removeRepoFromInstallationForAuthenticatedUser: [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" ], resetToken: ["PATCH /applications/{client_id}/token"], revokeInstallationAccessToken: ["DELETE /installation/token"], scopeToken: ["POST /applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], unsuspendInstallation: [ "DELETE /app/installations/{installation_id}/suspended" ], updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET 
/orgs/{org}/settings/billing/actions"], getGithubActionsBillingUser: [ "GET /users/{username}/settings/billing/actions" ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], getGithubPackagesBillingUser: [ "GET /users/{username}/settings/billing/packages" ], getSharedStorageBillingOrg: [ "GET /orgs/{org}/settings/billing/shared-storage" ], getSharedStorageBillingUser: [ "GET /users/{username}/settings/billing/shared-storage" ] }, checks: { create: ["POST /repos/{owner}/{repo}/check-runs"], createSuite: ["POST /repos/{owner}/{repo}/check-suites"], get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], listAnnotations: [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" ], listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], listForSuite: [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" ], listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], rerequestRun: [ "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" ], rerequestSuite: [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" ], setSuitesPreferences: [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" ], update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { deleteAnalysis: [ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" ], getAlert: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { renamedParameters: { alert_id: "alert_number" } } ], getAnalysis: [ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" ], getCodeqlDatabase: [ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" ], getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], listAlertInstances: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ], listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], listAlertsInstances: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { renamed: ["codeScanning", "listAlertInstances"] } ], listCodeqlDatabases: [ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], updateAlert: [ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" ], updateDefaultSetup: [ "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { getAllCodesOfConduct: ["GET /codes_of_conduct"], getConductCode: ["GET /codes_of_conduct/{key}"] }, codespaces: { addRepositoryForSecretForAuthenticatedUser: [ "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], checkPermissionsForDevcontainer: [ "GET /repos/{owner}/{repo}/codespaces/permissions_check" ], codespaceMachinesForAuthenticatedUser: [ "GET /user/codespaces/{codespace_name}/machines" ], createForAuthenticatedUser: ["POST /user/codespaces"], createOrUpdateOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}" ], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], createOrUpdateSecretForAuthenticatedUser: [ "PUT 
/user/codespaces/secrets/{secret_name}" ], createWithPrForAuthenticatedUser: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" ], createWithRepoForAuthenticatedUser: [ "POST /repos/{owner}/{repo}/codespaces" ], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], deleteFromOrganization: [ "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], deleteRepoSecret: [ "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], deleteSecretForAuthenticatedUser: [ "DELETE /user/codespaces/secrets/{secret_name}" ], exportForAuthenticatedUser: [ "POST /user/codespaces/{codespace_name}/exports" ], getCodespacesForUserInOrg: [ "GET /orgs/{org}/members/{username}/codespaces" ], getExportDetailsForAuthenticatedUser: [ "GET /user/codespaces/{codespace_name}/exports/{export_id}" ], getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], getPublicKeyForAuthenticatedUser: [ "GET /user/codespaces/secrets/public-key" ], getRepoPublicKey: [ "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" ], getRepoSecret: [ "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], getSecretForAuthenticatedUser: [ "GET /user/codespaces/secrets/{secret_name}" ], listDevcontainersInRepositoryForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/devcontainers" ], listForAuthenticatedUser: ["GET /user/codespaces"], listInOrganization: [ "GET /orgs/{org}/codespaces", {}, { renamedParameters: { org_id: "org" } } ], listInRepositoryForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces" ], listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], listRepositoriesForSecretForAuthenticatedUser: [ "GET /user/codespaces/secrets/{secret_name}/repositories" ], listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ], preFlightWithRepoForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/new" ], publishForAuthenticatedUser: [ "POST /user/codespaces/{codespace_name}/publish" ], removeRepositoryForSecretForAuthenticatedUser: [ "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], repoMachinesForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/machines" ], setRepositoriesForSecretForAuthenticatedUser: [ "PUT /user/codespaces/secrets/{secret_name}/repositories" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], stopInOrganization: [ "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" ], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, copilot: { addCopilotForBusinessSeatsForTeams: [ "POST /orgs/{org}/copilot/billing/selected_teams" ], addCopilotForBusinessSeatsForUsers: [ "POST /orgs/{org}/copilot/billing/selected_users" ], cancelCopilotSeatAssignmentForTeams: [ "DELETE /orgs/{org}/copilot/billing/selected_teams" ], cancelCopilotSeatAssignmentForUsers: [ "DELETE 
/orgs/{org}/copilot/billing/selected_users" ], getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], getCopilotSeatDetailsForUser: [ "GET /orgs/{org}/members/{username}/copilot" ], listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] }, dependabot: { addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" ], createOrUpdateOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}" ], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], deleteRepoSecret: [ "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], getRepoPublicKey: [ "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" ], getRepoSecret: [ "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], listAlertsForEnterprise: [ "GET /enterprises/{enterprise}/dependabot/alerts" ], listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ], updateAlert: [ "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" ] }, dependencyGraph: { createRepositorySnapshot: [ "POST /repos/{owner}/{repo}/dependency-graph/snapshots" ], diffRange: [ "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" ], exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] }, emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], createComment: ["POST /gists/{gist_id}/comments"], delete: ["DELETE /gists/{gist_id}"], deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], fork: ["POST /gists/{gist_id}/forks"], get: ["GET /gists/{gist_id}"], getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], getRevision: ["GET /gists/{gist_id}/{sha}"], list: ["GET /gists"], listComments: ["GET /gists/{gist_id}/comments"], listCommits: ["GET /gists/{gist_id}/commits"], listForUser: ["GET /users/{username}/gists"], listForks: ["GET /gists/{gist_id}/forks"], listPublic: ["GET /gists/public"], listStarred: ["GET /gists/starred"], star: ["PUT /gists/{gist_id}/star"], unstar: ["DELETE /gists/{gist_id}/star"], update: ["PATCH /gists/{gist_id}"], updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] }, git: { createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], createCommit: ["POST /repos/{owner}/{repo}/git/commits"], createRef: ["POST /repos/{owner}/{repo}/git/refs"], createTag: ["POST /repos/{owner}/{repo}/git/tags"], createTree: ["POST /repos/{owner}/{repo}/git/trees"], deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], getTag: ["GET 
/repos/{owner}/{repo}/git/tags/{tag_sha}"], getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] }, gitignore: { getAllTemplates: ["GET /gitignore/templates"], getTemplate: ["GET /gitignore/templates/{name}"] }, interactions: { getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], getRestrictionsForYourPublicRepos: [ "GET /user/interaction-limits", {}, { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } ], removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], removeRestrictionsForRepo: [ "DELETE /repos/{owner}/{repo}/interaction-limits" ], removeRestrictionsForYourPublicRepos: [ "DELETE /user/interaction-limits", {}, { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } ], setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], setRestrictionsForYourPublicRepos: [ "PUT /user/interaction-limits", {}, { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } ] }, issues: { addAssignees: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], checkUserCanBeAssignedToIssue: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" ], create: ["POST /repos/{owner}/{repo}/issues"], createComment: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], deleteComment: [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], deleteMilestone: [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], list: ["GET /issues"], listAssignees: ["GET /repos/{owner}/{repo}/assignees"], listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], listEventsForTimeline: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], listLabelsForMilestone: [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], listLabelsOnIssue: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], removeAllLabels: [ 
"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ], removeAssignees: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ], removeLabel: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], updateMilestone: [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ] }, licenses: { get: ["GET /licenses/{license}"], getAllCommonlyUsed: ["GET /licenses"], getForRepo: ["GET /repos/{owner}/{repo}/license"] }, markdown: { render: ["POST /markdown"], renderRaw: [ "POST /markdown/raw", { headers: { "content-type": "text/plain; charset=utf-8" } } ] }, meta: { get: ["GET /meta"], getAllVersions: ["GET /versions"], getOctocat: ["GET /octocat"], getZen: ["GET /zen"], root: ["GET /"] }, migrations: { cancelImport: [ "DELETE /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" } ], deleteArchiveForAuthenticatedUser: [ "DELETE /user/migrations/{migration_id}/archive" ], deleteArchiveForOrg: [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" ], downloadArchiveForOrg: [ "GET /orgs/{org}/migrations/{migration_id}/archive" ], getArchiveForAuthenticatedUser: [ "GET /user/migrations/{migration_id}/archive" ], getCommitAuthors: [ "GET /repos/{owner}/{repo}/import/authors", {}, { deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" } ], getImportStatus: [ "GET /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" } ], getLargeFiles: [ "GET /repos/{owner}/{repo}/import/large_files", {}, { deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" } ], getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], listForAuthenticatedUser: ["GET /user/migrations"], listForOrg: ["GET /orgs/{org}/migrations"], listReposForAuthenticatedUser: [ "GET /user/migrations/{migration_id}/repositories" ], listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], listReposForUser: [ "GET /user/migrations/{migration_id}/repositories", {}, { renamed: ["migrations", "listReposForAuthenticatedUser"] } ], mapCommitAuthor: [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", {}, { deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" } ], setLfsPreference: [ "PATCH /repos/{owner}/{repo}/import/lfs", {}, { deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" } ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], startImport: [ "PUT /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.startImport() is deprecated, see 
https://docs.github.com/rest/migrations/source-imports#start-an-import" } ], unlockRepoForAuthenticatedUser: [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" ], unlockRepoForOrg: [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" ], updateImport: [ "PATCH /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" } ] }, orgs: { addSecurityManagerTeam: [ "PUT /orgs/{org}/security-managers/teams/{team_slug}" ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], convertMemberToOutsideCollaborator: [ "PUT /orgs/{org}/outside_collaborators/{username}" ], createInvitation: ["POST /orgs/{org}/invitations"], createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], createOrUpdateCustomPropertiesValuesForRepos: [ "PATCH /orgs/{org}/properties/values" ], createOrUpdateCustomProperty: [ "PUT /orgs/{org}/properties/schema/{custom_property_name}" ], createWebhook: ["POST /orgs/{org}/hooks"], delete: ["DELETE /orgs/{org}"], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], enableOrDisableSecurityProductOnAllOrgRepos: [ "POST /orgs/{org}/{security_product}/{enablement}" ], get: ["GET /orgs/{org}"], getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], getCustomProperty: [ "GET /orgs/{org}/properties/schema/{custom_property_name}" ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], getWebhookDelivery: [ "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], listPatGrantRepositories: [ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" ], listPatGrantRequestRepositories: [ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" ], listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], redeliverWebhookDelivery: [ "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" ], removeCustomProperty: [ "DELETE /orgs/{org}/properties/schema/{custom_property_name}" ], removeMember: ["DELETE 
/orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], removeOutsideCollaborator: [ "DELETE /orgs/{org}/outside_collaborators/{username}" ], removePublicMembershipForAuthenticatedUser: [ "DELETE /orgs/{org}/public_members/{username}" ], removeSecurityManagerTeam: [ "DELETE /orgs/{org}/security-managers/teams/{team_slug}" ], reviewPatGrantRequest: [ "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" ], reviewPatGrantRequestsInBulk: [ "POST /orgs/{org}/personal-access-token-requests" ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], setPublicMembershipForAuthenticatedUser: [ "PUT /orgs/{org}/public_members/{username}" ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], updateMembershipForAuthenticatedUser: [ "PATCH /user/memberships/orgs/{org}" ], updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] }, packages: { deletePackageForAuthenticatedUser: [ "DELETE /user/packages/{package_type}/{package_name}" ], deletePackageForOrg: [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}" ], deletePackageForUser: [ "DELETE /users/{username}/packages/{package_type}/{package_name}" ], deletePackageVersionForAuthenticatedUser: [ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], deletePackageVersionForOrg: [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], deletePackageVersionForUser: [ "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getAllPackageVersionsForAPackageOwnedByAnOrg: [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } ], getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions", {}, { renamed: [ "packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" ] } ], getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions" ], getAllPackageVersionsForPackageOwnedByOrg: [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ], getAllPackageVersionsForPackageOwnedByUser: [ "GET /users/{username}/packages/{package_type}/{package_name}/versions" ], getPackageForAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}" ], getPackageForOrganization: [ "GET /orgs/{org}/packages/{package_type}/{package_name}" ], getPackageForUser: [ "GET /users/{username}/packages/{package_type}/{package_name}" ], getPackageVersionForAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getPackageVersionForOrganization: [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getPackageVersionForUser: [ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], listDockerMigrationConflictingPackagesForAuthenticatedUser: [ "GET /user/docker/conflicts" ], listDockerMigrationConflictingPackagesForOrganization: [ "GET /orgs/{org}/docker/conflicts" ], listDockerMigrationConflictingPackagesForUser: [ "GET /users/{username}/docker/conflicts" ], listPackagesForAuthenticatedUser: ["GET 
/user/packages"], listPackagesForOrganization: ["GET /orgs/{org}/packages"], listPackagesForUser: ["GET /users/{username}/packages"], restorePackageForAuthenticatedUser: [ "POST /user/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageForOrg: [ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageForUser: [ "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageVersionForAuthenticatedUser: [ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], restorePackageVersionForOrg: [ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], restorePackageVersionForUser: [ "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ] }, projects: { addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], createCard: ["POST /projects/columns/{column_id}/cards"], createColumn: ["POST /projects/{project_id}/columns"], createForAuthenticatedUser: ["POST /user/projects"], createForOrg: ["POST /orgs/{org}/projects"], createForRepo: ["POST /repos/{owner}/{repo}/projects"], delete: ["DELETE /projects/{project_id}"], deleteCard: ["DELETE /projects/columns/cards/{card_id}"], deleteColumn: ["DELETE /projects/columns/{column_id}"], get: ["GET /projects/{project_id}"], getCard: ["GET /projects/columns/cards/{card_id}"], getColumn: ["GET /projects/columns/{column_id}"], getPermissionForUser: [ "GET /projects/{project_id}/collaborators/{username}/permission" ], listCards: ["GET /projects/columns/{column_id}/cards"], listCollaborators: ["GET /projects/{project_id}/collaborators"], listColumns: ["GET /projects/{project_id}/columns"], listForOrg: ["GET /orgs/{org}/projects"], listForRepo: ["GET /repos/{owner}/{repo}/projects"], listForUser: ["GET /users/{username}/projects"], moveCard: ["POST /projects/columns/cards/{card_id}/moves"], moveColumn: ["POST /projects/columns/{column_id}/moves"], removeCollaborator: [ "DELETE /projects/{project_id}/collaborators/{username}" ], update: ["PATCH /projects/{project_id}"], updateCard: ["PATCH /projects/columns/cards/{card_id}"], updateColumn: ["PATCH /projects/columns/{column_id}"] }, pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], createReplyForReviewComment: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], createReviewComment: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], deletePendingReview: [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], deleteReviewComment: [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ], dismissReview: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], getReview: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], listCommentsForReview: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], listRequestedReviewers: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], 
listReviewComments: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], removeRequestedReviewers: [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], requestReviewers: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], submitReview: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], updateBranch: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" ], updateReview: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], updateReviewComment: [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] }, rateLimit: { get: ["GET /rate_limit"] }, reactions: { createForCommitComment: [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], createForIssue: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" ], createForIssueComment: [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], createForPullRequestReviewComment: [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ], createForRelease: [ "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" ], createForTeamDiscussionCommentInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ], createForTeamDiscussionInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ], deleteForCommitComment: [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" ], deleteForIssue: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" ], deleteForIssueComment: [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" ], deleteForPullRequestComment: [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" ], deleteForRelease: [ "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" ], deleteForTeamDiscussion: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" ], deleteForTeamDiscussionComment: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" ], listForCommitComment: [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], listForIssueComment: [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], listForPullRequestReviewComment: [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ], listForRelease: [ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" ], listForTeamDiscussionCommentInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ], listForTeamDiscussionInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ] }, repos: { acceptInvitation: [ "PATCH /user/repository_invitations/{invitation_id}", {}, { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } ], acceptInvitationForAuthenticatedUser: [ "PATCH /user/repository_invitations/{invitation_id}" ], addAppAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: 
"apps" } ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], addStatusCheckContexts: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { mapToData: "contexts" } ], addTeamAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], addUserAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], checkAutomatedSecurityFixes: [ "GET /repos/{owner}/{repo}/automated-security-fixes" ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], checkVulnerabilityAlerts: [ "GET /repos/{owner}/{repo}/vulnerability-alerts" ], codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], compareCommitsWithBasehead: [ "GET /repos/{owner}/{repo}/compare/{basehead}" ], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], createCommitComment: [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ], createCommitSignatureProtection: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], createDeploymentBranchPolicy: [ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ], createDeploymentProtectionRule: [ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], createDeploymentStatus: [ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], createOrUpdateEnvironment: [ "PUT /repos/{owner}/{repo}/environments/{environment_name}" ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], createOrgRuleset: ["POST /orgs/{org}/rulesets"], createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], createUsingTemplate: [ "POST /repos/{template_owner}/{template_repo}/generate" ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], declineInvitation: [ "DELETE /user/repository_invitations/{invitation_id}", {}, { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } ], declineInvitationForAuthenticatedUser: [ "DELETE /user/repository_invitations/{invitation_id}" ], delete: ["DELETE /repos/{owner}/{repo}"], deleteAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], deleteAdminBranchProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], deleteAnEnvironment: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}" ], deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], deleteBranchProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], deleteCommitSignatureProtection: [ "DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], deleteDeployment: [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ], deleteDeploymentBranchPolicy: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], deleteInvitation: [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ], deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], deletePullRequestReviewProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], deleteReleaseAsset: [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ], deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], deleteTagProtection: [ "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], disableAutomatedSecurityFixes: [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" ], disableDeploymentProtectionRule: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], disablePrivateVulnerabilityReporting: [ "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" ], disableVulnerabilityAlerts: [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" ], downloadArchive: [ "GET /repos/{owner}/{repo}/zipball/{ref}", {}, { renamed: ["repos", "downloadZipballArchive"] } ], downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], enableAutomatedSecurityFixes: [ "PUT /repos/{owner}/{repo}/automated-security-fixes" ], enablePrivateVulnerabilityReporting: [ "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" ], enableVulnerabilityAlerts: [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" ], generateReleaseNotes: [ "POST /repos/{owner}/{repo}/releases/generate-notes" ], get: ["GET /repos/{owner}/{repo}"], getAccessRestrictions: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], getAdminBranchProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], getAllDeploymentProtectionRules: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], getAllStatusCheckContexts: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ], getAllTopics: ["GET /repos/{owner}/{repo}/topics"], getAppsWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], getBranchProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ], getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], getCollaboratorPermissionLevel: [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], 
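// Editorial note (not part of the upstream @octokit data): every entry in this generated
// endpoints map is a [route, defaults, decorations] tuple. The route string is split into an
// HTTP verb and a templated URL by the endpoints-to-methods code later in this module, so a
// hedged usage sketch (assuming an already-constructed `octokit` instance; owner/repo/ref
// values below are placeholders) looks like:
//
//   // resolves to "GET /repos/{owner}/{repo}/commits/{ref}"
//   const commit = await octokit.rest.repos.getCommit({ owner: "octo", repo: "demo", ref: "main" });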
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], getCommitSignatureProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], getCustomDeploymentProtectionRule: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], getDeploymentBranchPolicy: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], getDeploymentStatus: [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ], getEnvironment: [ "GET /repos/{owner}/{repo}/environments/{environment_name}" ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], getPullRequestReviewProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], getRepoRuleSuite: [ "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" ], getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], getStatusChecksProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], getTeamsWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], getUsersWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], getWebhookConfigForRepo: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" ], getWebhookDelivery: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" ], listActivities: ["GET /repos/{owner}/{repo}/activity"], listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], listBranchesForHeadCommit: [ 
"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], listCommentsForCommit: [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], listCommitStatusesForRef: [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], listCustomDeploymentRuleIntegrations: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], listDeploymentBranchPolicies: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ], listDeploymentStatuses: [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], listForUser: ["GET /users/{username}/repos"], listForks: ["GET /repos/{owner}/{repo}/forks"], listInvitations: ["GET /repos/{owner}/{repo}/invitations"], listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], listPullRequestsAssociatedWithCommit: [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" ], listReleaseAssets: [ "GET /repos/{owner}/{repo}/releases/{release_id}/assets" ], listReleases: ["GET /repos/{owner}/{repo}/releases"], listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], listWebhookDeliveries: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], redeliverWebhookDelivery: [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" ], removeAppAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" } ], removeCollaborator: [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ], removeStatusCheckContexts: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { mapToData: "contexts" } ], removeStatusCheckProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], removeTeamAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], removeUserAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], setAdminBranchProtection: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], setAppAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" } ], setStatusCheckContexts: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { 
mapToData: "contexts" } ], setTeamAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], setUserAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], updateBranchProtection: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], updateDeploymentBranchPolicy: [ "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], updateInvitation: [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ], updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], updatePullRequestReviewProtection: [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], updateReleaseAsset: [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ], updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], updateStatusCheckPotection: [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { renamed: ["repos", "updateStatusCheckProtection"] } ], updateStatusCheckProtection: [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], updateWebhookConfigForRepo: [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" ], uploadReleaseAsset: [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { baseUrl: "https://uploads.github.com" } ] }, search: { code: ["GET /search/code"], commits: ["GET /search/commits"], issuesAndPullRequests: ["GET /search/issues"], labels: ["GET /search/labels"], repos: ["GET /search/repositories"], topics: ["GET /search/topics"], users: ["GET /search/users"] }, secretScanning: { getAlert: [ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ], listAlertsForEnterprise: [ "GET /enterprises/{enterprise}/secret-scanning/alerts" ], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], listLocationsForAlert: [ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" ], updateAlert: [ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ] }, securityAdvisories: { createPrivateVulnerabilityReport: [ "POST /repos/{owner}/{repo}/security-advisories/reports" ], createRepositoryAdvisory: [ "POST /repos/{owner}/{repo}/security-advisories" ], createRepositoryAdvisoryCveRequest: [ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" ], getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], getRepositoryAdvisory: [ "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" ], listGlobalAdvisories: ["GET /advisories"], listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], updateRepositoryAdvisory: [ "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" ] }, teams: { addOrUpdateMembershipForUserInOrg: [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ], addOrUpdateProjectPermissionsInOrg: [ "PUT 
/orgs/{org}/teams/{team_slug}/projects/{project_id}" ], addOrUpdateRepoPermissionsInOrg: [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], checkPermissionsForProjectInOrg: [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], checkPermissionsForRepoInOrg: [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], create: ["POST /orgs/{org}/teams"], createDiscussionCommentInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], deleteDiscussionCommentInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], deleteDiscussionInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], getDiscussionCommentInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], getDiscussionInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], getMembershipForUserInOrg: [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], listDiscussionCommentsInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ], listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], listPendingInvitationsInOrg: [ "GET /orgs/{org}/teams/{team_slug}/invitations" ], listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], removeMembershipForUserInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ], removeProjectInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], removeRepoInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], updateDiscussionCommentInOrg: [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], updateDiscussionInOrg: [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { addEmailForAuthenticated: [ "POST /user/emails", {}, { renamed: ["users", "addEmailForAuthenticatedUser"] } ], addEmailForAuthenticatedUser: ["POST /user/emails"], addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], createGpgKeyForAuthenticated: [ "POST /user/gpg_keys", {}, { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } ], createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], createPublicSshKeyForAuthenticated: [ "POST /user/keys", {}, { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } ], createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], deleteEmailForAuthenticated: [ "DELETE /user/emails", {}, { renamed: ["users", "deleteEmailForAuthenticatedUser"] } ], deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], deleteGpgKeyForAuthenticated: [ "DELETE /user/gpg_keys/{gpg_key_id}", {}, { renamed: ["users", 
"deleteGpgKeyForAuthenticatedUser"] } ], deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], deletePublicSshKeyForAuthenticated: [ "DELETE /user/keys/{key_id}", {}, { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } ], deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], deleteSshSigningKeyForAuthenticatedUser: [ "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" ], follow: ["PUT /user/following/{username}"], getAuthenticated: ["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], getGpgKeyForAuthenticated: [ "GET /user/gpg_keys/{gpg_key_id}", {}, { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } ], getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], getPublicSshKeyForAuthenticated: [ "GET /user/keys/{key_id}", {}, { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } ], getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], getSshSigningKeyForAuthenticatedUser: [ "GET /user/ssh_signing_keys/{ssh_signing_key_id}" ], list: ["GET /users"], listBlockedByAuthenticated: [ "GET /user/blocks", {}, { renamed: ["users", "listBlockedByAuthenticatedUser"] } ], listBlockedByAuthenticatedUser: ["GET /user/blocks"], listEmailsForAuthenticated: [ "GET /user/emails", {}, { renamed: ["users", "listEmailsForAuthenticatedUser"] } ], listEmailsForAuthenticatedUser: ["GET /user/emails"], listFollowedByAuthenticated: [ "GET /user/following", {}, { renamed: ["users", "listFollowedByAuthenticatedUser"] } ], listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], listGpgKeysForAuthenticated: [ "GET /user/gpg_keys", {}, { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } ], listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], listPublicEmailsForAuthenticated: [ "GET /user/public_emails", {}, { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } ], listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], listPublicSshKeysForAuthenticated: [ "GET /user/keys", {}, { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } ], listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], setPrimaryEmailVisibilityForAuthenticated: [ "PATCH /user/email/visibility", {}, { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } ], setPrimaryEmailVisibilityForAuthenticatedUser: [ "PATCH /user/email/visibility" ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; var endpoints_default = Endpoints; // pkg/dist-src/endpoints-to-methods.js var endpointMethodsMap = /* @__PURE__ */ new Map(); for (const [scope, endpoints] of Object.entries(endpoints_default)) { for (const [methodName, endpoint] of Object.entries(endpoints)) { const [route, defaults, decorations] = endpoint; const [method, url] = route.split(/ 
/); const endpointDefaults = Object.assign( { method, url }, defaults ); if (!endpointMethodsMap.has(scope)) { endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); } endpointMethodsMap.get(scope).set(methodName, { scope, methodName, endpointDefaults, decorations }); } } var handler = { has({ scope }, methodName) { return endpointMethodsMap.get(scope).has(methodName); }, getOwnPropertyDescriptor(target, methodName) { return { value: this.get(target, methodName), // ensures method is in the cache configurable: true, writable: true, enumerable: true }; }, defineProperty(target, methodName, descriptor) { Object.defineProperty(target.cache, methodName, descriptor); return true; }, deleteProperty(target, methodName) { delete target.cache[methodName]; return true; }, ownKeys({ scope }) { return [...endpointMethodsMap.get(scope).keys()]; }, set(target, methodName, value) { return target.cache[methodName] = value; }, get({ octokit, scope, cache }, methodName) { if (cache[methodName]) { return cache[methodName]; } const method = endpointMethodsMap.get(scope).get(methodName); if (!method) { return void 0; } const { endpointDefaults, decorations } = method; if (decorations) { cache[methodName] = decorate( octokit, scope, methodName, endpointDefaults, decorations ); } else { cache[methodName] = octokit.request.defaults(endpointDefaults); } return cache[methodName]; } }; function endpointsToMethods(octokit) { const newMethods = {}; for (const scope of endpointMethodsMap.keys()) { newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } return newMethods; } function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); function withDecorations(...args) { let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; octokit.log.warn( `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` ); } if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } if (decorations.renamedParameters) { const options2 = requestWithDefaults.endpoint.merge(...args); for (const [name, alias] of Object.entries( decorations.renamedParameters )) { if (name in options2) { octokit.log.warn( `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` ); if (!(alias in options2)) { options2[alias] = options2[name]; } delete options2[name]; } } return requestWithDefaults(options2); } return requestWithDefaults(...args); } return Object.assign(withDecorations, requestWithDefaults); } // pkg/dist-src/index.js function restEndpointMethods(octokit) { const api = endpointsToMethods(octokit); return { rest: api }; } restEndpointMethods.VERSION = VERSION; function legacyRestEndpointMethods(octokit) { const api = endpointsToMethods(octokit); return { ...api, rest: api }; } legacyRestEndpointMethods.VERSION = VERSION; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 537: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __create = Object.create; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __getProtoOf = Object.getPrototypeOf; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( // If the importer is in node compatibility mode or this is not an ESM // file that has been converted to a CommonJS file using a Babel- // compatible transform (i.e. "__esModule" has not been set), then set // "default" to the CommonJS "module.exports" for node compatibility. isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, mod )); var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { RequestError: () => RequestError }); module.exports = __toCommonJS(dist_src_exports); var import_deprecation = __nccwpck_require__(8932); var import_once = __toESM(__nccwpck_require__(1223)); var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); var RequestError = class extends Error { constructor(message, statusCode, options) { super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = "HttpError"; this.status = statusCode; let headers; if ("headers" in options && typeof options.headers !== "undefined") { headers = options.headers; } if ("response" in options) { this.response = options.response; headers = options.response.headers; } const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { authorization: options.request.headers.authorization.replace( / .*$/, " [REDACTED]" ) }); } requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); this.request = requestCopy; Object.defineProperty(this, "code", { get() { logOnceCode( new import_deprecation.Deprecation( "[@octokit/request-error] `error.code` is deprecated, use `error.status`." ) ); return statusCode; } }); Object.defineProperty(this, "headers", { get() { logOnceHeaders( new import_deprecation.Deprecation( "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
) ); return headers || {}; } }); } }; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 6234: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { request: () => request }); module.exports = __toCommonJS(dist_src_exports); var import_endpoint = __nccwpck_require__(9440); var import_universal_user_agent = __nccwpck_require__(5030); // pkg/dist-src/version.js var VERSION = "8.1.6"; // pkg/dist-src/is-plain-object.js function isPlainObject(value) { if (typeof value !== "object" || value === null) return false; if (Object.prototype.toString.call(value) !== "[object Object]") return false; const proto = Object.getPrototypeOf(value); if (proto === null) return true; const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } // pkg/dist-src/fetch-wrapper.js var import_request_error = __nccwpck_require__(537); // pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } // pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { var _a, _b, _c; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } let headers = {}; let status; let url; let { fetch } = globalThis; if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { fetch = requestOptions.request.fetch; } if (!fetch) { throw new Error( "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" ); } return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, headers: requestOptions.headers, signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, // duplex must be set if request.body is ReadableStream or Async Iterables. // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
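// fetchWrapper uses globalThis.fetch unless a fetch implementation was passed via request.fetch;
// 204/205 responses return no body, HEAD and 304 responses are special-cased, and any status
// >= 400 is rethrown as a RequestError from module 537 above.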
...requestOptions.body && { duplex: "half" } }).then(async (response) => { url = response.url; status = response.status; for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } if ("deprecation" in headers) { const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); const deprecationLink = matches && matches.pop(); log.warn( `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` ); } if (status === 204 || status === 205) { return; } if (requestOptions.method === "HEAD") { if (status < 400) { return; } throw new import_request_error.RequestError(response.statusText, status, { response: { url, status, headers, data: void 0 }, request: requestOptions }); } if (status === 304) { throw new import_request_error.RequestError("Not modified", status, { response: { url, status, headers, data: await getResponseData(response) }, request: requestOptions }); } if (status >= 400) { const data = await getResponseData(response); const error = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, headers, data }, request: requestOptions }); throw error; } return parseSuccessResponseBody ? await getResponseData(response) : response.body; }).then((data) => { return { status, url, headers, data }; }).catch((error) => { if (error instanceof import_request_error.RequestError) throw error; else if (error.name === "AbortError") throw error; let message = error.message; if (error.name === "TypeError" && "cause" in error) { if (error.cause instanceof Error) { message = error.cause.message; } else if (typeof error.cause === "string") { message = error.cause; } } throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } async function getResponseData(response) { const contentType = response.headers.get("content-type"); if (/application\/json/.test(contentType)) { return response.json().catch(() => response.text()).catch(() => ""); } if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } return getBufferResponse(response); } function toErrorMessage(data) { if (typeof data === "string") return data; if ("message" in data) { if (Array.isArray(data.errors)) { return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; } return data.message; } return `Unknown error: ${JSON.stringify(data)}`; } // pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { const endpoint2 = oldEndpoint.defaults(newDefaults); const newApi = function(route, parameters) { const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { return fetchWrapper(endpoint2.parse(endpointOptions)); } const request2 = (route2, parameters2) => { return fetchWrapper( endpoint2.parse(endpoint2.merge(route2, parameters2)) ); }; Object.assign(request2, { endpoint: endpoint2, defaults: withDefaults.bind(null, endpoint2) }); return endpointOptions.request.hook(request2, endpointOptions); }; return Object.assign(newApi, { endpoint: endpoint2, defaults: withDefaults.bind(null, endpoint2) }); } // pkg/dist-src/index.js var request = withDefaults(import_endpoint.endpoint, { headers: { "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 
3178: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IncomingWebhook = void 0; const axios_1 = __importDefault(__nccwpck_require__(8757)); const errors_1 = __nccwpck_require__(8564); const instrument_1 = __nccwpck_require__(8645); /** * A client for Slack's Incoming Webhooks */ class IncomingWebhook { constructor(url, defaults = { timeout: 0, }) { if (url === undefined) { throw new Error('Incoming webhook URL is required'); } this.url = url; this.defaults = defaults; this.axios = axios_1.default.create({ baseURL: url, httpAgent: defaults.agent, httpsAgent: defaults.agent, maxRedirects: 0, proxy: false, timeout: defaults.timeout, headers: { // eslint-disable-next-line @typescript-eslint/naming-convention 'User-Agent': (0, instrument_1.getUserAgent)(), }, }); delete this.defaults.agent; } /** * Send a notification to a conversation * @param message - the message (a simple string, or an object describing the message) */ async send(message) { // NOTE: no support for TLS config let payload = Object.assign({}, this.defaults); if (typeof message === 'string') { payload.text = message; } else { payload = Object.assign(payload, message); } try { const response = await this.axios.post(this.url, payload); return this.buildResult(response); // eslint-disable-next-line @typescript-eslint/no-explicit-any } catch (error) { // Wrap errors in this packages own error types (abstract the implementation details' types) if (error.response !== undefined) { throw (0, errors_1.httpErrorWithOriginal)(error); } else if (error.request !== undefined) { throw (0, errors_1.requestErrorWithOriginal)(error); } else { throw error; } } } /** * Processes an HTTP response into an IncomingWebhookResult. 
*/ // eslint-disable-next-line class-methods-use-this buildResult(response) { return { text: response.data, }; } } exports.IncomingWebhook = IncomingWebhook; //# sourceMappingURL=IncomingWebhook.js.map /***/ }), /***/ 8564: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.httpErrorWithOriginal = exports.requestErrorWithOriginal = exports.ErrorCode = void 0; /** * A dictionary of codes for errors produced by this package */ var ErrorCode; (function (ErrorCode) { ErrorCode["RequestError"] = "slack_webhook_request_error"; ErrorCode["HTTPError"] = "slack_webhook_http_error"; })(ErrorCode = exports.ErrorCode || (exports.ErrorCode = {})); /** * Factory for producing a {@link CodedError} from a generic error */ function errorWithCode(error, code) { // NOTE: might be able to return something more specific than a CodedError with conditional typing const codedError = error; codedError.code = code; return codedError; } /** * A factory to create IncomingWebhookRequestError objects * @param original The original error */ function requestErrorWithOriginal(original) { const error = errorWithCode(new Error(`A request error occurred: ${original.message}`), ErrorCode.RequestError); error.original = original; return error; } exports.requestErrorWithOriginal = requestErrorWithOriginal; /** * A factory to create IncomingWebhookHTTPError objects * @param original The original error */ function httpErrorWithOriginal(original) { const error = errorWithCode(new Error(`An HTTP protocol error occurred: statusCode = ${original.response.status}`), ErrorCode.HTTPError); error.original = original; return error; } exports.httpErrorWithOriginal = httpErrorWithOriginal; //# sourceMappingURL=errors.js.map /***/ }), /***/ 1095: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /// Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ErrorCode = exports.IncomingWebhook = void 0; var IncomingWebhook_1 = __nccwpck_require__(3178); Object.defineProperty(exports, "IncomingWebhook", ({ enumerable: true, get: function () { return IncomingWebhook_1.IncomingWebhook; } })); var errors_1 = __nccwpck_require__(8564); Object.defineProperty(exports, "ErrorCode", ({ enumerable: true, get: function () { return errors_1.ErrorCode; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 8645: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
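// Module 8645 (instrument.js from @slack/webhook) builds the User-Agent header sent with webhook
// requests: "<package>/<version> node/<node version> <platform>/<release>", with '/' in package
// names normalized to ':' and any addAppMetadata() identifiers prepended when present.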
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getUserAgent = exports.addAppMetadata = void 0; const os = __importStar(__nccwpck_require__(2037)); const packageJson = __nccwpck_require__(2849); // eslint-disable-line import/no-commonjs, @typescript-eslint/no-var-requires /** * Replaces occurrences of '/' with ':' in a string, since '/' is meaningful inside User-Agent strings as a separator. */ function replaceSlashes(s) { return s.replace('/', ':'); } const baseUserAgent = `${replaceSlashes(packageJson.name)}/${packageJson.version} ` + `node/${process.version.replace('v', '')} ` + `${os.platform()}/${os.release()}`; const appMetadata = {}; /** * Appends the app metadata into the User-Agent value * @param appMetadata.name name of tool to be counted in instrumentation * @param appMetadata.version version of tool to be counted in instrumentation */ function addAppMetadata({ name, version }) { appMetadata[replaceSlashes(name)] = version; } exports.addAppMetadata = addAppMetadata; /** * Returns the current User-Agent value for instrumentation */ function getUserAgent() { const appIdentifier = Object.entries(appMetadata).map(([name, version]) => `${name}/${version}`).join(' '); // only prepend the appIdentifier when its not empty return ((appIdentifier.length > 0) ? `${appIdentifier} ` : '') + baseUserAgent; } exports.getUserAgent = getUserAgent; //# sourceMappingURL=instrument.js.map /***/ }), /***/ 6761: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const Utils = __nccwpck_require__(5182); const pth = __nccwpck_require__(1017); const ZipEntry = __nccwpck_require__(4057); const ZipFile = __nccwpck_require__(7744); const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); const get_Str = (val, def) => (typeof val === "string" ? val : def); const defaultOptions = { // option "noSort" : if true it disables files sorting noSort: false, // read entries during load (initial loading may be slower) readEntries: false, // default method is none method: Utils.Constants.NONE, // file system fs: null }; module.exports = function (/**String*/ input, /** object */ options) { let inBuffer = null; // create object based default options, allowing them to be overwritten const opts = Object.assign(Object.create(null), defaultOptions); // test input variable if (input && "object" === typeof input) { // if value is not buffer we accept it to be object with options if (!(input instanceof Uint8Array)) { Object.assign(opts, input); input = opts.input ? 
opts.input : undefined; if (opts.input) delete opts.input; } // if input is buffer if (Buffer.isBuffer(input)) { inBuffer = input; opts.method = Utils.Constants.BUFFER; input = undefined; } } // assign options Object.assign(opts, options); // instanciate utils filesystem const filetools = new Utils(opts); // if input is file name we retrieve its content if (input && "string" === typeof input) { // load zip file if (filetools.fs.existsSync(input)) { opts.method = Utils.Constants.FILE; opts.filename = input; inBuffer = filetools.fs.readFileSync(input); } else { throw new Error(Utils.Errors.INVALID_FILENAME); } } // create variable const _zip = new ZipFile(inBuffer, opts); const { canonical, sanitize } = Utils; function getEntry(/**Object*/ entry) { if (entry && _zip) { var item; // If entry was given as a file name if (typeof entry === "string") item = _zip.getEntry(entry); // if entry was given as a ZipEntry object if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); if (item) { return item; } } return null; } function fixPath(zipPath) { const { join, normalize, sep } = pth.posix; // convert windows file separators and normalize return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); } return { /** * Extracts the given entry from the archive and returns the content as a Buffer object * @param entry ZipEntry object or String with the full path of the entry * * @return Buffer or Null in case of error */ readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { var item = getEntry(entry); return (item && item.getData(pass)) || null; }, /** * Asynchronous readFile * @param entry ZipEntry object or String with the full path of the entry * @param callback * * @return Buffer or Null in case of error */ readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { var item = getEntry(entry); if (item) { item.getDataAsync(callback); } else { callback(null, "getEntry failed for:" + entry); } }, /** * Extracts the given entry from the archive and returns the content as plain text in the given encoding * @param entry ZipEntry object or String with the full path of the entry * @param encoding Optional. If no encoding is specified utf8 is used * * @return String */ readAsText: function (/**Object*/ entry, /**String=*/ encoding) { var item = getEntry(entry); if (item) { var data = item.getData(); if (data && data.length) { return data.toString(encoding || "utf8"); } } return ""; }, /** * Asynchronous readAsText * @param entry ZipEntry object or String with the full path of the entry * @param callback * @param encoding Optional. If no encoding is specified utf8 is used * * @return String */ readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { var item = getEntry(entry); if (item) { item.getDataAsync(function (data, err) { if (err) { callback(data, err); return; } if (data && data.length) { callback(data.toString(encoding || "utf8")); } else { callback(""); } }); } else { callback(""); } }, /** * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory * * @param entry */ deleteFile: function (/**Object*/ entry) { // @TODO: test deleteFile var item = getEntry(entry); if (item) { _zip.deleteEntry(item.entryName); } }, /** * Adds a comment to the zip. The zip must be rewritten after adding the comment. 
* * @param comment */ addZipComment: function (/**String*/ comment) { // @TODO: test addZipComment _zip.comment = comment; }, /** * Returns the zip comment * * @return String */ getZipComment: function () { return _zip.comment || ""; }, /** * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment * The comment cannot exceed 65535 characters in length * * @param entry * @param comment */ addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { var item = getEntry(entry); if (item) { item.comment = comment; } }, /** * Returns the comment of the specified entry * * @param entry * @return String */ getZipEntryComment: function (/**Object*/ entry) { var item = getEntry(entry); if (item) { return item.comment || ""; } return ""; }, /** * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content * * @param entry * @param content */ updateFile: function (/**Object*/ entry, /**Buffer*/ content) { var item = getEntry(entry); if (item) { item.setData(content); } }, /** * Adds a file from the disk to the archive * * @param localPath File to add to zip * @param zipPath Optional path inside the zip * @param zipName Optional name for the file */ addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { if (filetools.fs.existsSync(localPath)) { // fix ZipPath zipPath = zipPath ? fixPath(zipPath) : ""; // p - local file name var p = localPath.split("\\").join("/").split("/").pop(); // add file name into zippath zipPath += zipName ? zipName : p; // read file attributes const _attr = filetools.fs.statSync(localPath); // add file into zip file this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); } else { throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); } }, /** * Adds a local directory and all its nested files and directories to the archive * * @param localPath * @param zipPath optional path inside zip * @param filter optional RegExp or Function if files match will * be included. * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object */ addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter, /**=number|object*/ attr) { // Prepare filter if (filter instanceof RegExp) { // if filter is RegExp wrap it filter = (function (rx) { return function (filename) { return rx.test(filename); }; })(filter); } else if ("function" !== typeof filter) { // if filter is not function we will replace it filter = function () { return true; }; } // fix ZipPath zipPath = zipPath ? fixPath(zipPath) : ""; // normalize the path first localPath = pth.normalize(localPath); if (filetools.fs.existsSync(localPath)) { const items = filetools.findFiles(localPath); const self = this; if (items.length) { items.forEach(function (filepath) { var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix if (filter(p)) { var stats = filetools.fs.statSync(filepath); if (stats.isFile()) { self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", attr ? attr : stats); } else { self.addFile(zipPath + p + "/", Buffer.alloc(0), "", attr ? attr : stats); } } }); } } else { throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); } }, /** * Asynchronous addLocalFile * @param localPath * @param callback * @param zipPath optional path inside zip * @param filter optional RegExp or Function if files match will * be included. 
*/ addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { if (filter instanceof RegExp) { filter = (function (rx) { return function (filename) { return rx.test(filename); }; })(filter); } else if ("function" !== typeof filter) { filter = function () { return true; }; } // fix ZipPath zipPath = zipPath ? fixPath(zipPath) : ""; // normalize the path first localPath = pth.normalize(localPath); var self = this; filetools.fs.open(localPath, "r", function (err) { if (err && err.code === "ENOENT") { callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); } else if (err) { callback(undefined, err); } else { var items = filetools.findFiles(localPath); var i = -1; var next = function () { i += 1; if (i < items.length) { var filepath = items[i]; var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix p = p .normalize("NFD") .replace(/[\u0300-\u036f]/g, "") .replace(/[^\x20-\x7E]/g, ""); // accent fix if (filter(p)) { filetools.fs.stat(filepath, function (er0, stats) { if (er0) callback(undefined, er0); if (stats.isFile()) { filetools.fs.readFile(filepath, function (er1, data) { if (er1) { callback(undefined, er1); } else { self.addFile(zipPath + p, data, "", stats); next(); } }); } else { self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); next(); } }); } else { process.nextTick(() => { next(); }); } } else { callback(true, undefined); } }; next(); } }); }, /** * * @param {string} localPath - path where files will be extracted * @param {object} props - optional properties * @param {string} props.zipPath - optional path inside zip * @param {regexp, function} props.filter - RegExp or Function if files match will be included. */ addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { return new Promise((resolve, reject) => { const { filter, zipPath } = Object.assign({}, props); this.addLocalFolderAsync( localPath, (done, err) => { if (err) reject(err); if (done) resolve(this); }, zipPath, filter ); }); }, /** * Allows you to create a entry (file or directory) in the zip file. * If you want to create a directory the entryName must end in / and a null buffer should be provided. * Comment and attributes are optional * * @param {string} entryName * @param {Buffer | string} content - file content as buffer or utf8 coded string * @param {string} comment - file comment * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object */ addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { let entry = getEntry(entryName); const update = entry != null; // prepare new entry if (!update) { entry = new ZipEntry(); entry.entryName = entryName; } entry.comment = comment || ""; const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; // last modification time from file stats if (isStat) { entry.header.time = attr.mtime; } // Set file attribute var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) // extended attributes field for Unix // set file type either S_IFDIR / S_IFREG let unix = entry.isDirectory ? 0x4000 : 0x8000; if (isStat) { // File attributes from file stats unix |= 0xfff & attr.mode; } else if ("number" === typeof attr) { // attr from given attr values unix |= 0xfff & attr; } else { // Default values: unix |= entry.isDirectory ? 
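// Default modes applied just below: 0o755 for directories and 0o644 for files. The resulting Unix
// mode is packed into the upper 16 bits of the zip "external attributes" word, while the MS-DOS
// directory flag (0x10) stays in the low bits.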
0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) } fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes entry.attr = fileattr; entry.setData(content); if (!update) _zip.setEntry(entry); }, /** * Returns an array of ZipEntry objects representing the files and folders inside the archive * * @return Array */ getEntries: function () { return _zip ? _zip.entries : []; }, /** * Returns a ZipEntry object representing the file or folder specified by ``name``. * * @param name * @return ZipEntry */ getEntry: function (/**String*/ name) { return getEntry(name); }, getEntryCount: function () { return _zip.getEntryCount(); }, forEach: function (callback) { return _zip.forEach(callback); }, /** * Extracts the given entry to the given targetPath * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted * * @param entry ZipEntry object or String with the full path of the entry * @param targetPath Target folder where to write the file * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder * will be created in targetPath as well. Default is TRUE * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. * Default is FALSE * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. * Default is FALSE * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) * * @return Boolean */ extractEntryTo: function ( /**Object*/ entry, /**String*/ targetPath, /**Boolean*/ maintainEntryPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**String**/ outFileName ) { overwrite = get_Bool(overwrite, false); keepOriginalPermission = get_Bool(keepOriginalPermission, false); maintainEntryPath = get_Bool(maintainEntryPath, true); outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); var item = getEntry(entry); if (!item) { throw new Error(Utils.Errors.NO_ENTRY); } var entryName = canonical(item.entryName); var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); if (item.isDirectory) { var children = _zip.getEntryChildren(item); children.forEach(function (child) { if (child.isDirectory) return; var content = child.getData(); if (!content) { throw new Error(Utils.Errors.CANT_EXTRACT_FILE); } var name = canonical(child.entryName); var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); // The reverse operation for attr depend on method addFile() const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; filetools.writeFileTo(childName, content, overwrite, fileAttr); }); return true; } var content = item.getData(); if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); if (filetools.fs.existsSync(target) && !overwrite) { throw new Error(Utils.Errors.CANT_OVERRIDE); } // The reverse operation for attr depend on method addFile() const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; filetools.writeFileTo(target, content, overwrite, fileAttr); return true; }, /** * Test the archive * */ test: function (pass) { if (!_zip) { return false; } for (var entry in _zip.entries) { try { if (entry.isDirectory) { continue; } var content = _zip.entries[entry].getData(pass); if (!content) { return false; } } catch (err) { return false; } } return true; }, /** * Extracts the entire archive to the given location * * @param targetPath Target location * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. * Default is FALSE * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. * Default is FALSE */ extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { overwrite = get_Bool(overwrite, false); pass = get_Str(keepOriginalPermission, pass); keepOriginalPermission = get_Bool(keepOriginalPermission, false); if (!_zip) { throw new Error(Utils.Errors.NO_ZIP); } _zip.entries.forEach(function (entry) { var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); if (entry.isDirectory) { filetools.makeDir(entryName); return; } var content = entry.getData(pass); if (!content) { throw new Error(Utils.Errors.CANT_EXTRACT_FILE); } // The reverse operation for attr depend on method addFile() const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; filetools.writeFileTo(entryName, content, overwrite, fileAttr); try { filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); } catch (err) { throw new Error(Utils.Errors.CANT_EXTRACT_FILE); } }); }, /** * Asynchronous extractAllTo * * @param targetPath Target location * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. * Default is FALSE * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. * Default is FALSE * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. */ extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { overwrite = get_Bool(overwrite, false); if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; keepOriginalPermission = get_Bool(keepOriginalPermission, false); if (!callback) { callback = function (err) { throw new Error(err); }; } if (!_zip) { callback(new Error(Utils.Errors.NO_ZIP)); return; } targetPath = pth.resolve(targetPath); // convert entryName to const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); const getError = (msg, file) => new Error(msg + ': "' + file + '"'); // separate directories from files const dirEntries = []; const fileEntries = new Set(); _zip.entries.forEach((e) => { if (e.isDirectory) { dirEntries.push(e); } else { fileEntries.add(e); } }); // Create directory entries first synchronously // this prevents race condition and assures folders are there before writing files for (const entry of dirEntries) { const dirPath = getPath(entry); // The reverse operation for attr depend on method addFile() const dirAttr = keepOriginalPermission ? 
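// extractAllToAsync creates all directory entries synchronously first, so parent folders exist
// before any file is written, then extracts file entries via getDataAsync(); the final callback
// fires once the fileEntries set has been drained, and errors are reported through that same callback.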
entry.header.fileAttr : undefined; try { filetools.makeDir(dirPath); if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); // in unix timestamp will change if files are later added to folder, but still filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); } catch (er) { callback(getError("Unable to create folder", dirPath)); } } // callback wrapper, for some house keeping const done = () => { if (fileEntries.size === 0) { callback(); } }; // Extract file entries asynchronously for (const entry of fileEntries.values()) { const entryName = pth.normalize(canonical(entry.entryName.toString())); const filePath = sanitize(targetPath, entryName); entry.getDataAsync(function (content, err_1) { if (err_1) { callback(new Error(err_1)); return; } if (!content) { callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); } else { // The reverse operation for attr depend on method addFile() const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { if (!succ) { callback(getError("Unable to write file", filePath)); return; } filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { if (err_2) { callback(getError("Unable to set times", filePath)); return; } fileEntries.delete(entry); // call the callback if it was last entry done(); }); }); } }); } // call the callback if fileEntries was empty done(); }, /** * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip * * @param targetFileName * @param callback */ writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { if (arguments.length === 1) { if (typeof targetFileName === "function") { callback = targetFileName; targetFileName = ""; } } if (!targetFileName && opts.filename) { targetFileName = opts.filename; } if (!targetFileName) return; var zipData = _zip.compressToBuffer(); if (zipData) { var ok = filetools.writeFileTo(targetFileName, zipData, true); if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); } }, writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { const { overwrite, perm } = Object.assign({ overwrite: true }, props); return new Promise((resolve, reject) => { // find file name if (!targetFileName && opts.filename) targetFileName = opts.filename; if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); this.toBufferPromise().then((zipData) => { const ret = (done) => (done ? 
resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); }, reject); }); }, toBufferPromise: function () { return new Promise((resolve, reject) => { _zip.toAsyncBuffer(resolve, reject); }); }, /** * Returns the content of the entire zip file as a Buffer object * * @return Buffer */ toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { this.valueOf = 2; if (typeof onSuccess === "function") { _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); return null; } return _zip.compressToBuffer(); } }; }; /***/ }), /***/ 9032: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Utils = __nccwpck_require__(5182), Constants = Utils.Constants; /* The central directory file header */ module.exports = function () { var _verMade = 20, // v2.0 _version = 10, // v1.0 _flags = 0, _method = 0, _time = 0, _crc = 0, _compressedSize = 0, _size = 0, _fnameLen = 0, _extraLen = 0, _comLen = 0, _diskStart = 0, _inattr = 0, _attr = 0, _offset = 0; _verMade |= Utils.isWin ? 0x0a00 : 0x0300; // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. // Without it file names may be corrupted for other apps when file names use unicode chars _flags |= Constants.FLG_EFS; var _dataHeader = {}; function setTime(val) { val = new Date(val); _time = (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 ((val.getMonth() + 1) << 21) | // b05-08 month (val.getDate() << 16) | // b00-04 hour // 2 bytes time (val.getHours() << 11) | // b11-15 hour (val.getMinutes() << 5) | // b05-10 minute (val.getSeconds() >> 1); // b00-04 seconds divided by 2 } setTime(+new Date()); return { get made() { return _verMade; }, set made(val) { _verMade = val; }, get version() { return _version; }, set version(val) { _version = val; }, get flags() { return _flags; }, set flags(val) { _flags = val; }, get method() { return _method; }, set method(val) { switch (val) { case Constants.STORED: this.version = 10; case Constants.DEFLATED: default: this.version = 20; } _method = val; }, get time() { return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); }, set time(val) { setTime(val); }, get crc() { return _crc; }, set crc(val) { _crc = Math.max(0, val) >>> 0; }, get compressedSize() { return _compressedSize; }, set compressedSize(val) { _compressedSize = Math.max(0, val) >>> 0; }, get size() { return _size; }, set size(val) { _size = Math.max(0, val) >>> 0; }, get fileNameLength() { return _fnameLen; }, set fileNameLength(val) { _fnameLen = val; }, get extraLength() { return _extraLen; }, set extraLength(val) { _extraLen = val; }, get commentLength() { return _comLen; }, set commentLength(val) { _comLen = val; }, get diskNumStart() { return _diskStart; }, set diskNumStart(val) { _diskStart = Math.max(0, val) >>> 0; }, get inAttr() { return _inattr; }, set inAttr(val) { _inattr = Math.max(0, val) >>> 0; }, get attr() { return _attr; }, set attr(val) { _attr = Math.max(0, val) >>> 0; }, // get Unix file permissions get fileAttr() { return _attr ? 
(((_attr >>> 0) | 0) >> 16) & 0xfff : 0; }, get offset() { return _offset; }, set offset(val) { _offset = Math.max(0, val) >>> 0; }, get encripted() { return (_flags & 1) === 1; }, get entryHeaderSize() { return Constants.CENHDR + _fnameLen + _extraLen + _comLen; }, get realDataOffset() { return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; }, get dataHeader() { return _dataHeader; }, loadDataHeaderFromBinary: function (/*Buffer*/ input) { var data = input.slice(_offset, _offset + Constants.LOCHDR); // 30 bytes and should start with "PK\003\004" if (data.readUInt32LE(0) !== Constants.LOCSIG) { throw new Error(Utils.Errors.INVALID_LOC); } _dataHeader = { // version needed to extract version: data.readUInt16LE(Constants.LOCVER), // general purpose bit flag flags: data.readUInt16LE(Constants.LOCFLG), // compression method method: data.readUInt16LE(Constants.LOCHOW), // modification time (2 bytes time, 2 bytes date) time: data.readUInt32LE(Constants.LOCTIM), // uncompressed file crc-32 value crc: data.readUInt32LE(Constants.LOCCRC), // compressed size compressedSize: data.readUInt32LE(Constants.LOCSIZ), // uncompressed size size: data.readUInt32LE(Constants.LOCLEN), // filename length fnameLen: data.readUInt16LE(Constants.LOCNAM), // extra field length extraLen: data.readUInt16LE(Constants.LOCEXT) }; }, loadFromBinary: function (/*Buffer*/ data) { // data should be 46 bytes and start with "PK 01 02" if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { throw new Error(Utils.Errors.INVALID_CEN); } // version made by _verMade = data.readUInt16LE(Constants.CENVEM); // version needed to extract _version = data.readUInt16LE(Constants.CENVER); // encrypt, decrypt flags _flags = data.readUInt16LE(Constants.CENFLG); // compression method _method = data.readUInt16LE(Constants.CENHOW); // modification time (2 bytes time, 2 bytes date) _time = data.readUInt32LE(Constants.CENTIM); // uncompressed file crc-32 value _crc = data.readUInt32LE(Constants.CENCRC); // compressed size _compressedSize = data.readUInt32LE(Constants.CENSIZ); // uncompressed size _size = data.readUInt32LE(Constants.CENLEN); // filename length _fnameLen = data.readUInt16LE(Constants.CENNAM); // extra field length _extraLen = data.readUInt16LE(Constants.CENEXT); // file comment length _comLen = data.readUInt16LE(Constants.CENCOM); // volume number start _diskStart = data.readUInt16LE(Constants.CENDSK); // internal file attributes _inattr = data.readUInt16LE(Constants.CENATT); // external file attributes _attr = data.readUInt32LE(Constants.CENATX); // LOC header offset _offset = data.readUInt32LE(Constants.CENOFF); }, dataHeaderToBinary: function () { // LOC header size (30 bytes) var data = Buffer.alloc(Constants.LOCHDR); // "PK\003\004" data.writeUInt32LE(Constants.LOCSIG, 0); // version needed to extract data.writeUInt16LE(_version, Constants.LOCVER); // general purpose bit flag data.writeUInt16LE(_flags, Constants.LOCFLG); // compression method data.writeUInt16LE(_method, Constants.LOCHOW); // modification time (2 bytes time, 2 bytes date) data.writeUInt32LE(_time, Constants.LOCTIM); // uncompressed file crc-32 value data.writeUInt32LE(_crc, Constants.LOCCRC); // compressed size data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); // uncompressed size data.writeUInt32LE(_size, Constants.LOCLEN); // filename length data.writeUInt16LE(_fnameLen, Constants.LOCNAM); // extra field length data.writeUInt16LE(_extraLen, Constants.LOCEXT); return data; }, entryHeaderToBinary: 
function () { // CEN header size (46 bytes) var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); // "PK\001\002" data.writeUInt32LE(Constants.CENSIG, 0); // version made by data.writeUInt16LE(_verMade, Constants.CENVEM); // version needed to extract data.writeUInt16LE(_version, Constants.CENVER); // encrypt, decrypt flags data.writeUInt16LE(_flags, Constants.CENFLG); // compression method data.writeUInt16LE(_method, Constants.CENHOW); // modification time (2 bytes time, 2 bytes date) data.writeUInt32LE(_time, Constants.CENTIM); // uncompressed file crc-32 value data.writeUInt32LE(_crc, Constants.CENCRC); // compressed size data.writeUInt32LE(_compressedSize, Constants.CENSIZ); // uncompressed size data.writeUInt32LE(_size, Constants.CENLEN); // filename length data.writeUInt16LE(_fnameLen, Constants.CENNAM); // extra field length data.writeUInt16LE(_extraLen, Constants.CENEXT); // file comment length data.writeUInt16LE(_comLen, Constants.CENCOM); // volume number start data.writeUInt16LE(_diskStart, Constants.CENDSK); // internal file attributes data.writeUInt16LE(_inattr, Constants.CENATT); // external file attributes data.writeUInt32LE(_attr, Constants.CENATX); // LOC header offset data.writeUInt32LE(_offset, Constants.CENOFF); // fill all with data.fill(0x00, Constants.CENHDR); return data; }, toJSON: function () { const bytes = function (nr) { return nr + " bytes"; }; return { made: _verMade, version: _version, flags: _flags, method: Utils.methodToString(_method), time: this.time, crc: "0x" + _crc.toString(16).toUpperCase(), compressedSize: bytes(_compressedSize), size: bytes(_size), fileNameLength: bytes(_fnameLen), extraLength: bytes(_extraLen), commentLength: bytes(_comLen), diskNumStart: _diskStart, inAttr: _inattr, attr: _attr, offset: _offset, entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) }; }, toString: function () { return JSON.stringify(this.toJSON(), null, "\t"); } }; }; /***/ }), /***/ 4958: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { exports.EntryHeader = __nccwpck_require__(9032); exports.MainHeader = __nccwpck_require__(8952); /***/ }), /***/ 8952: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Utils = __nccwpck_require__(5182), Constants = Utils.Constants; /* The entries in the end of central directory */ module.exports = function () { var _volumeEntries = 0, _totalEntries = 0, _size = 0, _offset = 0, _commentLength = 0; return { get diskEntries() { return _volumeEntries; }, set diskEntries(/*Number*/ val) { _volumeEntries = _totalEntries = val; }, get totalEntries() { return _totalEntries; }, set totalEntries(/*Number*/ val) { _totalEntries = _volumeEntries = val; }, get size() { return _size; }, set size(/*Number*/ val) { _size = val; }, get offset() { return _offset; }, set offset(/*Number*/ val) { _offset = val; }, get commentLength() { return _commentLength; }, set commentLength(/*Number*/ val) { _commentLength = val; }, get mainHeaderSize() { return Constants.ENDHDR + _commentLength; }, loadFromBinary: function (/*Buffer*/ data) { // data should be 22 bytes and start with "PK 05 06" // or be 56+ bytes and start with "PK 06 06" for Zip64 if ( (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) ) { throw new Error(Utils.Errors.INVALID_END); } if (data.readUInt32LE(0) === Constants.ENDSIG) { // number of entries on this volume _volumeEntries = 
data.readUInt16LE(Constants.ENDSUB); // total number of entries _totalEntries = data.readUInt16LE(Constants.ENDTOT); // central directory size in bytes _size = data.readUInt32LE(Constants.ENDSIZ); // offset of first CEN header _offset = data.readUInt32LE(Constants.ENDOFF); // zip file comment length _commentLength = data.readUInt16LE(Constants.ENDCOM); } else { // number of entries on this volume _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); // total number of entries _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); // central directory size in bytes _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); // offset of first CEN header _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); _commentLength = 0; } }, toBinary: function () { var b = Buffer.alloc(Constants.ENDHDR + _commentLength); // "PK 05 06" signature b.writeUInt32LE(Constants.ENDSIG, 0); b.writeUInt32LE(0, 4); // number of entries on this volume b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); // total number of entries b.writeUInt16LE(_totalEntries, Constants.ENDTOT); // central directory size in bytes b.writeUInt32LE(_size, Constants.ENDSIZ); // offset of first CEN header b.writeUInt32LE(_offset, Constants.ENDOFF); // zip file comment length b.writeUInt16LE(_commentLength, Constants.ENDCOM); // fill comment memory with spaces so no garbage is left there b.fill(" ", Constants.ENDHDR); return b; }, toJSON: function () { // creates 0x0000 style output const offset = function (nr, len) { let offs = nr.toString(16).toUpperCase(); while (offs.length < len) offs = "0" + offs; return "0x" + offs; }; return { diskEntries: _volumeEntries, totalEntries: _totalEntries, size: _size + " bytes", offset: offset(_offset, 4), commentLength: _commentLength }; }, toString: function () { return JSON.stringify(this.toJSON(), null, "\t"); } }; }; // Misspelled /***/ }), /***/ 7686: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function (/*Buffer*/ inbuf) { var zlib = __nccwpck_require__(9796); var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; return { deflate: function () { return zlib.deflateRawSync(inbuf, opts); }, deflateAsync: function (/*Function*/ callback) { var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0; tmp.on("data", function (data) { parts.push(data); total += data.length; }); tmp.on("end", function () { var buf = Buffer.alloc(total), written = 0; buf.fill(0); for (var i = 0; i < parts.length; i++) { var part = parts[i]; part.copy(buf, written); written += part.length; } callback && callback(buf); }); tmp.end(inbuf); } }; }; /***/ }), /***/ 3928: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { exports.Deflater = __nccwpck_require__(7686); exports.Inflater = __nccwpck_require__(2153); exports.ZipCrypto = __nccwpck_require__(3228); /***/ }), /***/ 2153: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = function (/*Buffer*/ inbuf) { var zlib = __nccwpck_require__(9796); return { inflate: function () { return zlib.inflateRawSync(inbuf); }, inflateAsync: function (/*Function*/ callback) { var tmp = zlib.createInflateRaw(), parts = [], total = 0; tmp.on("data", function (data) { parts.push(data); total += data.length; }); tmp.on("end", function () { var buf = Buffer.alloc(total), written = 0; buf.fill(0); for (var i = 0; i < parts.length; i++) { var part = parts[i]; part.copy(buf, written); written += part.length; } callback && callback(buf); }); tmp.end(inbuf); } }; }; 
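/*
 * Illustrative usage sketch — not part of the bundled module and kept as a comment so the
 * generated bundle is unchanged. It shows how the AdmZip facade documented above
 * (getEntries, getEntry, extractEntryTo, extractAllTo, writeZip, toBuffer) is typically
 * driven when the library is consumed directly as the `adm-zip` package; the file and
 * entry names below are hypothetical.
 *
 *   const AdmZip = require("adm-zip");
 *   const zip = new AdmZip("archive.zip");           // open an existing archive
 *
 *   for (const entry of zip.getEntries()) {          // central-directory listing
 *     console.log(entry.entryName, entry.header.size);
 *   }
 *
 *   // extract one entry, keeping its folder structure, overwriting if present
 *   zip.extractEntryTo("data/report.xml", "./out", true, true);
 *
 *   // or extract everything (overwrite = true)
 *   zip.extractAllTo("./out", true);
 *
 *   // modify and persist
 *   zip.addFile("meta/info.txt", Buffer.from("generated"));
 *   zip.writeZip("archive-updated.zip");             // or: const buf = zip.toBuffer();
 */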
/***/ }), /***/ 3228: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // node crypt, we use it for generate salt // eslint-disable-next-line node/no-unsupported-features/node-builtins const { randomFillSync } = __nccwpck_require__(6113); // generate CRC32 lookup table const crctable = new Uint32Array(256).map((t, crc) => { for (let j = 0; j < 8; j++) { if (0 !== (crc & 1)) { crc = (crc >>> 1) ^ 0xedb88320; } else { crc >>>= 1; } } return crc >>> 0; }); // C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) const uMul = (a, b) => Math.imul(a, b) >>> 0; // crc32 byte single update (actually same function is part of utils.crc32 function :) ) const crc32update = (pCrc32, bval) => { return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); }; // function for generating salt for encrytion header const genSalt = () => { if ("function" === typeof randomFillSync) { return randomFillSync(Buffer.alloc(12)); } else { // fallback if function is not defined return genSalt.node(); } }; // salt generation with node random function (mainly as fallback) genSalt.node = () => { const salt = Buffer.alloc(12); const len = salt.length; for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; return salt; }; // general config const config = { genSalt }; // Class Initkeys handles same basic ops with keys function Initkeys(pw) { const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw); this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); for (let i = 0; i < pass.length; i++) { this.updateKeys(pass[i]); } } Initkeys.prototype.updateKeys = function (byteValue) { const keys = this.keys; keys[0] = crc32update(keys[0], byteValue); keys[1] += keys[0] & 0xff; keys[1] = uMul(keys[1], 134775813) + 1; keys[2] = crc32update(keys[2], keys[1] >>> 24); return byteValue; }; Initkeys.prototype.next = function () { const k = (this.keys[2] | 2) >>> 0; // key return (uMul(k, k ^ 1) >> 8) & 0xff; // decode }; function make_decrypter(/*Buffer*/ pwd) { // 1. Stage initialize key const keys = new Initkeys(pwd); // return decrypter function return function (/*Buffer*/ data) { // result - we create new Buffer for results const result = Buffer.alloc(data.length); let pos = 0; // process input data for (let c of data) { //c ^= keys.next(); //result[pos++] = c; // decode & Save Value result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte } return result; }; } function make_encrypter(/*Buffer*/ pwd) { // 1. Stage initialize key const keys = new Initkeys(pwd); // return encrypting function, result and pos is here so we dont have to merge buffers later return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { // result - we create new Buffer for results if (!result) result = Buffer.alloc(data.length); // process input data for (let c of data) { const k = keys.next(); // save key byte result[pos++] = c ^ k; // save val keys.updateKeys(c); // update keys with decoded byte } return result; }; } function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { if (!data || !Buffer.isBuffer(data) || data.length < 12) { return Buffer.alloc(0); } // 1. We Initialize and generate decrypting function const decrypter = make_decrypter(pwd); // 2. decrypt salt what is always 12 bytes and is a part of file content const salt = decrypter(data.slice(0, 12)); // 3. does password meet expectations if (salt[11] !== header.crc >>> 24) { throw "ADM-ZIP: Wrong Password"; } // 4. 
decode content return decrypter(data.slice(12)); } // lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality function _salter(data) { if (Buffer.isBuffer(data) && data.length >= 12) { // be aware - currently salting buffer data is modified config.genSalt = function () { return data.slice(0, 12); }; } else if (data === "node") { // test salt generation with node random function config.genSalt = genSalt.node; } else { // if value is not acceptable config gets reset. config.genSalt = genSalt; } } function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { // 1. test data if data is not Buffer we make buffer from it if (data == null) data = Buffer.alloc(0); // if data is not buffer be make buffer from it if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); // 2. We Initialize and generate encrypting function const encrypter = make_encrypter(pwd); // 3. generate salt (12-bytes of random data) const salt = config.genSalt(); salt[11] = (header.crc >>> 24) & 0xff; // old implementations (before PKZip 2.04g) used two byte check if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; // 4. create output const result = Buffer.alloc(data.length + 12); encrypter(salt, result); // finally encode content return encrypter(data, result, 12); } module.exports = { decrypt, encrypt, _salter }; /***/ }), /***/ 4522: /***/ ((module) => { module.exports = { /* The local file header */ LOCHDR : 30, // LOC header size LOCSIG : 0x04034b50, // "PK\003\004" LOCVER : 4, // version needed to extract LOCFLG : 6, // general purpose bit flag LOCHOW : 8, // compression method LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) LOCCRC : 14, // uncompressed file crc-32 value LOCSIZ : 18, // compressed size LOCLEN : 22, // uncompressed size LOCNAM : 26, // filename length LOCEXT : 28, // extra field length /* The Data descriptor */ EXTSIG : 0x08074b50, // "PK\007\008" EXTHDR : 16, // EXT header size EXTCRC : 4, // uncompressed file crc-32 value EXTSIZ : 8, // compressed size EXTLEN : 12, // uncompressed size /* The central directory file header */ CENHDR : 46, // CEN header size CENSIG : 0x02014b50, // "PK\001\002" CENVEM : 4, // version made by CENVER : 6, // version needed to extract CENFLG : 8, // encrypt, decrypt flags CENHOW : 10, // compression method CENTIM : 12, // modification time (2 bytes time, 2 bytes date) CENCRC : 16, // uncompressed file crc-32 value CENSIZ : 20, // compressed size CENLEN : 24, // uncompressed size CENNAM : 28, // filename length CENEXT : 30, // extra field length CENCOM : 32, // file comment length CENDSK : 34, // volume number start CENATT : 36, // internal file attributes CENATX : 38, // external file attributes (host system dependent) CENOFF : 42, // LOC header offset /* The entries in the end of central directory */ ENDHDR : 22, // END header size ENDSIG : 0x06054b50, // "PK\005\006" ENDSUB : 8, // number of entries on this disk ENDTOT : 10, // total number of entries ENDSIZ : 12, // central directory size in bytes ENDOFF : 16, // offset of first CEN header ENDCOM : 20, // zip file comment length END64HDR : 20, // zip64 END header size END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" END64START : 4, // number of the disk with the start of the zip64 END64OFF : 8, // relative offset of the zip64 end of central directory END64NUMDISKS : 16, // total number of disks ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" ZIP64HDR : 56, // zip64 record 
minimum size ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE ZIP64SIZE : 4, // zip64 size of the central directory record ZIP64VEM : 12, // zip64 version made by ZIP64VER : 14, // zip64 version needed to extract ZIP64DSK : 16, // zip64 number of this disk ZIP64DSKDIR : 20, // number of the disk with the start of the record directory ZIP64SUB : 24, // number of entries on this disk ZIP64TOT : 32, // total number of entries ZIP64SIZB : 40, // zip64 central directory size in bytes ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number ZIP64EXTRA : 56, // extensible data sector /* Compression methods */ STORED : 0, // no compression SHRUNK : 1, // shrunk REDUCED1 : 2, // reduced with compression factor 1 REDUCED2 : 3, // reduced with compression factor 2 REDUCED3 : 4, // reduced with compression factor 3 REDUCED4 : 5, // reduced with compression factor 4 IMPLODED : 6, // imploded // 7 reserved for Tokenizing compression algorithm DEFLATED : 8, // deflated ENHANCED_DEFLATED: 9, // enhanced deflated PKWARE : 10,// PKWare DCL imploded // 11 reserved by PKWARE BZIP2 : 12, // compressed using BZIP2 // 13 reserved by PKWARE LZMA : 14, // LZMA // 15-17 reserved by PKWARE IBM_TERSE : 18, // compressed using IBM TERSE IBM_LZ77 : 19, // IBM LZ77 z AES_ENCRYPT : 99, // WinZIP AES encryption method /* General purpose bit flag */ // values can obtained with expression 2**bitnr FLG_ENC : 1, // Bit 0: encrypted file FLG_COMP1 : 2, // Bit 1, compression option FLG_COMP2 : 4, // Bit 2, compression option FLG_DESC : 8, // Bit 3, data descriptor FLG_ENH : 16, // Bit 4, enhanced deflating FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. FLG_STR : 64, // Bit 6, strong encryption (patented) // Bits 7-10: Currently unused. FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) // Bit 12: Reserved by PKWARE for enhanced compression. // Bit 13: encrypted the Central Directory (patented). // Bits 14-15: Reserved by PKWARE. 
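/*
 * Illustrative note — not part of the module: these general-purpose flag values are plain
 * bit masks, so callers test them with a bitwise AND, mirroring the `(_flags & 1) === 1`
 * check used by the entry header's `encripted` getter above. `header` below is a
 * hypothetical entry header instance.
 *
 *   const utf8Names = (header.flags & 2048) !== 0;   // FLG_EFS: names/comments are UTF-8
 *   const hasDescriptor = (header.flags & 8) !== 0;  // FLG_DESC: sizes follow the data
 */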
FLG_MSK : 4096, // mask header values /* Load type */ FILE : 2, BUFFER : 1, NONE : 0, /* 4.5 Extensible data fields */ EF_ID : 0, EF_SIZE : 2, /* Header IDs */ ID_ZIP64 : 0x0001, ID_AVINFO : 0x0007, ID_PFS : 0x0008, ID_OS2 : 0x0009, ID_NTFS : 0x000a, ID_OPENVMS : 0x000c, ID_UNIX : 0x000d, ID_FORK : 0x000e, ID_PATCH : 0x000f, ID_X509_PKCS7 : 0x0014, ID_X509_CERTID_F : 0x0015, ID_X509_CERTID_C : 0x0016, ID_STRONGENC : 0x0017, ID_RECORD_MGT : 0x0018, ID_X509_PKCS7_RL : 0x0019, ID_IBM1 : 0x0065, ID_IBM2 : 0x0066, ID_POSZIP : 0x4690, EF_ZIP64_OR_32 : 0xffffffff, EF_ZIP64_OR_16 : 0xffff, EF_ZIP64_SUNCOMP : 0, EF_ZIP64_SCOMP : 8, EF_ZIP64_RHO : 16, EF_ZIP64_DSN : 24 }; /***/ }), /***/ 1255: /***/ ((module) => { module.exports = { /* Header error messages */ INVALID_LOC: "Invalid LOC header (bad signature)", INVALID_CEN: "Invalid CEN header (bad signature)", INVALID_END: "Invalid END header (bad signature)", /* ZipEntry error messages*/ NO_DATA: "Nothing to decompress", BAD_CRC: "CRC32 checksum failed", FILE_IN_THE_WAY: "There is a file in the way: %s", UNKNOWN_METHOD: "Invalid/unsupported compression method", /* Inflater error messages */ AVAIL_DATA: "inflate::Available inflate data did not terminate", INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", /* ADM-ZIP error messages */ CANT_EXTRACT_FILE: "Could not extract the file", CANT_OVERRIDE: "Target file already exists", NO_ZIP: "No zip file was loaded", NO_ENTRY: "Entry doesn't exist", DIRECTORY_CONTENT_ERROR: "A directory cannot have content", FILE_NOT_FOUND: "File not found: %s", NOT_IMPLEMENTED: "Not implemented", INVALID_FILENAME: "Invalid filename", INVALID_FORMAT: "Invalid or unsupported zip format. 
No END header found" }; /***/ }), /***/ 8321: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const fs = (__nccwpck_require__(2895).require)(); const pth = __nccwpck_require__(1017); fs.existsSync = fs.existsSync || pth.existsSync; module.exports = function (/*String*/ path) { var _path = path || "", _obj = newAttr(), _stat = null; function newAttr() { return { directory: false, readonly: false, hidden: false, executable: false, mtime: 0, atime: 0 }; } if (_path && fs.existsSync(_path)) { _stat = fs.statSync(_path); _obj.directory = _stat.isDirectory(); _obj.mtime = _stat.mtime; _obj.atime = _stat.atime; _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right _obj.hidden = pth.basename(_path)[0] === "."; } else { console.warn("Invalid path: " + _path); } return { get directory() { return _obj.directory; }, get readOnly() { return _obj.readonly; }, get hidden() { return _obj.hidden; }, get mtime() { return _obj.mtime; }, get atime() { return _obj.atime; }, get executable() { return _obj.executable; }, decodeAttributes: function () {}, encodeAttributes: function () {}, toJSON: function () { return { path: _path, isDirectory: _obj.directory, isReadOnly: _obj.readonly, isHidden: _obj.hidden, isExecutable: _obj.executable, mTime: _obj.mtime, aTime: _obj.atime }; }, toString: function () { return JSON.stringify(this.toJSON(), null, "\t"); } }; }; /***/ }), /***/ 2895: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { exports.require = function () { if (typeof process === "object" && process.versions && process.versions["electron"]) { try { const originalFs = __nccwpck_require__(2941); if (Object.keys(originalFs).length > 0) { return originalFs; } } catch (e) {} } return __nccwpck_require__(7147); }; /***/ }), /***/ 5182: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(1291); module.exports.Constants = __nccwpck_require__(4522); module.exports.Errors = __nccwpck_require__(1255); module.exports.FileAttr = __nccwpck_require__(8321); /***/ }), /***/ 1291: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const fsystem = (__nccwpck_require__(2895).require)(); const pth = __nccwpck_require__(1017); const Constants = __nccwpck_require__(4522); const Errors = __nccwpck_require__(1255); const isWin = typeof process === "object" && "win32" === process.platform; const is_Obj = (obj) => obj && typeof obj === "object"; // generate CRC32 lookup table const crcTable = new Uint32Array(256).map((t, c) => { for (let k = 0; k < 8; k++) { if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c >>>= 1; } } return c >>> 0; }); // UTILS functions function Utils(opts) { this.sep = pth.sep; this.fs = fsystem; if (is_Obj(opts)) { // custom filesystem if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { this.fs = opts.fs; } } } module.exports = Utils; // INSTANCED functions Utils.prototype.makeDir = function (/*String*/ folder) { const self = this; // Sync - make directories tree function mkdirSync(/*String*/ fpath) { let resolvedPath = fpath.split(self.sep)[0]; fpath.split(self.sep).forEach(function (name) { if (!name || name.substr(-1, 1) === ":") return; resolvedPath += self.sep + name; var stat; try { stat = self.fs.statSync(resolvedPath); } catch (e) { self.fs.mkdirSync(resolvedPath); } if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", 
resolvedPath); }); } mkdirSync(folder); }; Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { const self = this; if (self.fs.existsSync(path)) { if (!overwrite) return false; // cannot overwrite var stat = self.fs.statSync(path); if (stat.isDirectory()) { return false; } } var folder = pth.dirname(path); if (!self.fs.existsSync(folder)) { self.makeDir(folder); } var fd; try { fd = self.fs.openSync(path, "w", 438); // 0666 } catch (e) { self.fs.chmodSync(path, 438); fd = self.fs.openSync(path, "w", 438); } if (fd) { try { self.fs.writeSync(fd, content, 0, content.length, 0); } finally { self.fs.closeSync(fd); } } self.fs.chmodSync(path, attr || 438); return true; }; Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { if (typeof attr === "function") { callback = attr; attr = undefined; } const self = this; self.fs.exists(path, function (exist) { if (exist && !overwrite) return callback(false); self.fs.stat(path, function (err, stat) { if (exist && stat.isDirectory()) { return callback(false); } var folder = pth.dirname(path); self.fs.exists(folder, function (exists) { if (!exists) self.makeDir(folder); self.fs.open(path, "w", 438, function (err, fd) { if (err) { self.fs.chmod(path, 438, function () { self.fs.open(path, "w", 438, function (err, fd) { self.fs.write(fd, content, 0, content.length, 0, function () { self.fs.close(fd, function () { self.fs.chmod(path, attr || 438, function () { callback(true); }); }); }); }); }); } else if (fd) { self.fs.write(fd, content, 0, content.length, 0, function () { self.fs.close(fd, function () { self.fs.chmod(path, attr || 438, function () { callback(true); }); }); }); } else { self.fs.chmod(path, attr || 438, function () { callback(true); }); } }); }); }); }); }; Utils.prototype.findFiles = function (/*String*/ path) { const self = this; function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { if (typeof pattern === "boolean") { recursive = pattern; pattern = undefined; } let files = []; self.fs.readdirSync(dir).forEach(function (file) { var path = pth.join(dir, file); if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); if (!pattern || pattern.test(path)) { files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? self.sep : "")); } }); return files; } return findSync(path, undefined, true); }; Utils.prototype.getAttributes = function () {}; Utils.prototype.setAttributes = function () {}; // STATIC functions // crc32 single update (it is part of crc32) Utils.crc32update = function (crc, byte) { return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); }; Utils.crc32 = function (buf) { if (typeof buf === "string") { buf = Buffer.from(buf, "utf8"); } // Generate crcTable if (!crcTable.length) genCRCTable(); let len = buf.length; let crc = ~0; for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); // xor and cast as uint32 number return ~crc >>> 0; }; Utils.methodToString = function (/*Number*/ method) { switch (method) { case Constants.STORED: return "STORED (" + method + ")"; case Constants.DEFLATED: return "DEFLATED (" + method + ")"; default: return "UNSUPPORTED (" + method + ")"; } }; // removes ".." 
style path elements Utils.canonical = function (/*string*/ path) { if (!path) return ""; // trick normalize think path is absolute var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); return pth.join(".", safeSuffix); }; // make abolute paths taking prefix as root folder Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { prefix = pth.resolve(pth.normalize(prefix)); var parts = name.split("/"); for (var i = 0, l = parts.length; i < l; i++) { var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); if (path.indexOf(prefix) === 0) { return path; } } return pth.normalize(pth.join(prefix, pth.basename(name))); }; // converts buffer, Uint8Array, string types to buffer Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { if (Buffer.isBuffer(input)) { return input; } else if (input instanceof Uint8Array) { return Buffer.from(input); } else { // expect string all other values are invalid and return empty buffer return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0); } }; Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { var slice = Buffer.from(buffer.slice(index, index + 8)); slice.swap64(); return parseInt(`0x${slice.toString("hex")}`); }; Utils.isWin = isWin; // Do we have windows system Utils.crcTable = crcTable; /***/ }), /***/ 4057: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Utils = __nccwpck_require__(5182), Headers = __nccwpck_require__(4958), Constants = Utils.Constants, Methods = __nccwpck_require__(3928); module.exports = function (/*Buffer*/ input) { var _entryHeader = new Headers.EntryHeader(), _entryName = Buffer.alloc(0), _comment = Buffer.alloc(0), _isDirectory = false, uncompressedData = null, _extra = Buffer.alloc(0); function getCompressedDataFromZip() { if (!input || !Buffer.isBuffer(input)) { return Buffer.alloc(0); } _entryHeader.loadDataHeaderFromBinary(input); return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); } function crc32OK(data) { // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written if ((_entryHeader.flags & 0x8) !== 0x8) { if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { return false; } } else { // @TODO: load and check data descriptor header // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure // (optionally preceded by a 4-byte signature) immediately after the compressed data: } return true; } function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { if (typeof callback === "undefined" && typeof async === "string") { pass = async; async = void 0; } if (_isDirectory) { if (async && callback) { callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error. } return Buffer.alloc(0); } var compressedData = getCompressedDataFromZip(); if (compressedData.length === 0) { // File is empty, nothing to decompress. 
if (async && callback) callback(compressedData); return compressedData; } if (_entryHeader.encripted) { if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { throw new Error("ADM-ZIP: Incompatible password parameter"); } compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); } var data = Buffer.alloc(_entryHeader.size); switch (_entryHeader.method) { case Utils.Constants.STORED: compressedData.copy(data); if (!crc32OK(data)) { if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error throw new Error(Utils.Errors.BAD_CRC); } else { //si added otherwise did not seem to return data. if (async && callback) callback(data); return data; } case Utils.Constants.DEFLATED: var inflater = new Methods.Inflater(compressedData); if (!async) { const result = inflater.inflate(data); result.copy(data, 0); if (!crc32OK(data)) { throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); } return data; } else { inflater.inflateAsync(function (result) { result.copy(result, 0); if (callback) { if (!crc32OK(result)) { callback(result, Utils.Errors.BAD_CRC); //si added error } else { callback(result); } } }); } break; default: if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD); throw new Error(Utils.Errors.UNKNOWN_METHOD); } } function compress(/*Boolean*/ async, /*Function*/ callback) { if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { // no data set or the data wasn't changed to require recompression if (async && callback) callback(getCompressedDataFromZip()); return getCompressedDataFromZip(); } if (uncompressedData.length && !_isDirectory) { var compressedData; // Local file header switch (_entryHeader.method) { case Utils.Constants.STORED: _entryHeader.compressedSize = _entryHeader.size; compressedData = Buffer.alloc(uncompressedData.length); uncompressedData.copy(compressedData); if (async && callback) callback(compressedData); return compressedData; default: case Utils.Constants.DEFLATED: var deflater = new Methods.Deflater(uncompressedData); if (!async) { var deflated = deflater.deflate(); _entryHeader.compressedSize = deflated.length; return deflated; } else { deflater.deflateAsync(function (data) { compressedData = Buffer.alloc(data.length); _entryHeader.compressedSize = data.length; data.copy(compressedData); callback && callback(compressedData); }); } deflater = null; break; } } else if (async && callback) { callback(Buffer.alloc(0)); } else { return Buffer.alloc(0); } } function readUInt64LE(buffer, offset) { return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset); } function parseExtra(data) { var offset = 0; var signature, size, part; while (offset < data.length) { signature = data.readUInt16LE(offset); offset += 2; size = data.readUInt16LE(offset); offset += 2; part = data.slice(offset, offset + size); offset += size; if (Constants.ID_ZIP64 === signature) { parseZip64ExtendedInformation(part); } } } //Override header field values with values from the ZIP64 extra field function parseZip64ExtendedInformation(data) { var size, compressedSize, offset, diskNumStart; if (data.length >= Constants.EF_ZIP64_SCOMP) { size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { _entryHeader.size = size; } } if (data.length >= Constants.EF_ZIP64_RHO) { compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { _entryHeader.compressedSize = compressedSize; } } 
if (data.length >= Constants.EF_ZIP64_DSN) { offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { _entryHeader.offset = offset; } } if (data.length >= Constants.EF_ZIP64_DSN + 4) { diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { _entryHeader.diskNumStart = diskNumStart; } } } return { get entryName() { return _entryName.toString(); }, get rawEntryName() { return _entryName; }, set entryName(val) { _entryName = Utils.toBuffer(val); var lastChar = _entryName[_entryName.length - 1]; _isDirectory = lastChar === 47 || lastChar === 92; _entryHeader.fileNameLength = _entryName.length; }, get extra() { return _extra; }, set extra(val) { _extra = val; _entryHeader.extraLength = val.length; parseExtra(val); }, get comment() { return _comment.toString(); }, set comment(val) { _comment = Utils.toBuffer(val); _entryHeader.commentLength = _comment.length; }, get name() { var n = _entryName.toString(); return _isDirectory ? n .substr(n.length - 1) .split("/") .pop() : n.split("/").pop(); }, get isDirectory() { return _isDirectory; }, getCompressedData: function () { return compress(false, null); }, getCompressedDataAsync: function (/*Function*/ callback) { compress(true, callback); }, setData: function (value) { uncompressedData = Utils.toBuffer(value); if (!_isDirectory && uncompressedData.length) { _entryHeader.size = uncompressedData.length; _entryHeader.method = Utils.Constants.DEFLATED; _entryHeader.crc = Utils.crc32(value); _entryHeader.changed = true; } else { // folders and blank files should be stored _entryHeader.method = Utils.Constants.STORED; } }, getData: function (pass) { if (_entryHeader.changed) { return uncompressedData; } else { return decompress(false, null, pass); } }, getDataAsync: function (/*Function*/ callback, pass) { if (_entryHeader.changed) { callback(uncompressedData); } else { decompress(true, callback, pass); } }, set attr(attr) { _entryHeader.attr = attr; }, get attr() { return _entryHeader.attr; }, set header(/*Buffer*/ data) { _entryHeader.loadFromBinary(data); }, get header() { return _entryHeader; }, packHeader: function () { // 1. create header (buffer) var header = _entryHeader.entryHeaderToBinary(); var addpos = Utils.Constants.CENHDR; // 2. add file name _entryName.copy(header, addpos); addpos += _entryName.length; // 3. add extra data if (_entryHeader.extraLength) { _extra.copy(header, addpos); addpos += _entryHeader.extraLength; } // 4. 
add file comment if (_entryHeader.commentLength) { _comment.copy(header, addpos); } return header; }, toJSON: function () { const bytes = function (nr) { return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; }; return { entryName: this.entryName, name: this.name, comment: this.comment, isDirectory: this.isDirectory, header: _entryHeader.toJSON(), compressedData: bytes(input), data: bytes(uncompressedData) }; }, toString: function () { return JSON.stringify(this.toJSON(), null, "\t"); } }; }; /***/ }), /***/ 7744: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const ZipEntry = __nccwpck_require__(4057); const Headers = __nccwpck_require__(4958); const Utils = __nccwpck_require__(5182); module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { var entryList = [], entryTable = {}, _comment = Buffer.alloc(0), mainHeader = new Headers.MainHeader(), loadedEntries = false; // assign options const opts = Object.assign(Object.create(null), options); const { noSort } = opts; if (inBuffer) { // is a memory buffer readMainHeader(opts.readEntries); } else { // none. is a new file loadedEntries = true; } function iterateEntries(callback) { const totalEntries = mainHeader.diskEntries; // total number of entries let index = mainHeader.offset; // offset of first CEN header for (let i = 0; i < totalEntries; i++) { let tmp = index; const entry = new ZipEntry(inBuffer); entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); index += entry.header.entryHeaderSize; callback(entry); } } function readEntries() { loadedEntries = true; entryTable = {}; entryList = new Array(mainHeader.diskEntries); // total number of entries var index = mainHeader.offset; // offset of first CEN header for (var i = 0; i < entryList.length; i++) { var tmp = index, entry = new ZipEntry(inBuffer); entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); if (entry.header.extraLength) { entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); } if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); index += entry.header.entryHeaderSize; entryList[i] = entry; entryTable[entry.entryName] = entry; } } function readMainHeader(/*Boolean*/ readNow) { var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length n = max, endStart = inBuffer.length, endOffset = -1, // Start offset of the END header commentEnd = 0; for (i; i >= n; i--) { if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // "PK\005\006" endOffset = i; commentEnd = i; endStart = i + Utils.Constants.ENDHDR; // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature n = i - Utils.Constants.END64HDR; continue; } if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { // Found a zip64 signature, let's continue reading the whole zip64 record n = max; continue; } if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { // Found the zip64 record, let's determine it's size endOffset = i; endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; break; } } if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); 
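/*
 * Illustrative usage sketch — not part of the bundled module (comment only, no functional
 * change). The in-memory ZipFile above exposes its entries as ZipEntry objects whose
 * getters and methods (entryName, isDirectory, getData, getDataAsync) were defined in the
 * previous module. A typical read pattern through the public facade looks like this; the
 * password argument is only needed for ZipCrypto-encrypted entries:
 *
 *   const AdmZip = require("adm-zip");
 *   const zip = new AdmZip("archive.zip");
 *
 *   for (const entry of zip.getEntries()) {
 *     if (entry.isDirectory) continue;
 *     const data = entry.getData();                  // Buffer; use entry.getData("secret")
 *     console.log(entry.entryName, data.length);     // for password-protected entries
 *   }
 */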
mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); if (mainHeader.commentLength) { _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); } if (readNow) readEntries(); } function sortEntries() { if (entryList.length > 1 && !noSort) { entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); } } return { /** * Returns an array of ZipEntry objects existent in the current opened archive * @return Array */ get entries() { if (!loadedEntries) { readEntries(); } return entryList; }, /** * Archive comment * @return {String} */ get comment() { return _comment.toString(); }, set comment(val) { _comment = Utils.toBuffer(val); mainHeader.commentLength = _comment.length; }, getEntryCount: function () { if (!loadedEntries) { return mainHeader.diskEntries; } return entryList.length; }, forEach: function (callback) { if (!loadedEntries) { iterateEntries(callback); return; } entryList.forEach(callback); }, /** * Returns a reference to the entry with the given name or null if entry is inexistent * * @param entryName * @return ZipEntry */ getEntry: function (/*String*/ entryName) { if (!loadedEntries) { readEntries(); } return entryTable[entryName] || null; }, /** * Adds the given entry to the entry list * * @param entry */ setEntry: function (/*ZipEntry*/ entry) { if (!loadedEntries) { readEntries(); } entryList.push(entry); entryTable[entry.entryName] = entry; mainHeader.totalEntries = entryList.length; }, /** * Removes the entry with the given name from the entry list. * * If the entry is a directory, then all nested files and directories will be removed * @param entryName */ deleteEntry: function (/*String*/ entryName) { if (!loadedEntries) { readEntries(); } var entry = entryTable[entryName]; if (entry && entry.isDirectory) { var _self = this; this.getEntryChildren(entry).forEach(function (child) { if (child.entryName !== entryName) { _self.deleteEntry(child.entryName); } }); } entryList.splice(entryList.indexOf(entry), 1); delete entryTable[entryName]; mainHeader.totalEntries = entryList.length; }, /** * Iterates and returns all nested files and directories of the given entry * * @param entry * @return Array */ getEntryChildren: function (/*ZipEntry*/ entry) { if (!loadedEntries) { readEntries(); } if (entry && entry.isDirectory) { const list = []; const name = entry.entryName; const len = name.length; entryList.forEach(function (zipEntry) { if (zipEntry.entryName.substr(0, len) === name) { list.push(zipEntry); } }); return list; } return []; }, /** * Returns the zip file * * @return Buffer */ compressToBuffer: function () { if (!loadedEntries) { readEntries(); } sortEntries(); const dataBlock = []; const entryHeaders = []; let totalSize = 0; let dindex = 0; mainHeader.size = 0; mainHeader.offset = 0; for (const entry of entryList) { // compress data and set local and entry header accordingly. Reason why is called first const compressedData = entry.getCompressedData(); // 1. construct data header entry.header.offset = dindex; const dataHeader = entry.header.dataHeaderToBinary(); const entryNameLen = entry.rawEntryName.length; // 1.2. postheader - data after data header const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); entry.rawEntryName.copy(postHeader, 0); postHeader.copy(entry.extra, entryNameLen); // 2. offsets const dataLength = dataHeader.length + postHeader.length + compressedData.length; dindex += dataLength; // 3. 
store values in sequence dataBlock.push(dataHeader); dataBlock.push(postHeader); dataBlock.push(compressedData); // 4. construct entry header const entryHeader = entry.packHeader(); entryHeaders.push(entryHeader); // 5. update main header mainHeader.size += entryHeader.length; totalSize += dataLength + entryHeader.length; } totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length // point to end of data and beginning of central directory first record mainHeader.offset = dindex; dindex = 0; const outBuffer = Buffer.alloc(totalSize); // write data blocks for (const content of dataBlock) { content.copy(outBuffer, dindex); dindex += content.length; } // write central directory entries for (const content of entryHeaders) { content.copy(outBuffer, dindex); dindex += content.length; } // write main header const mh = mainHeader.toBinary(); if (_comment) { _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment } mh.copy(outBuffer, dindex); return outBuffer; }, toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { try { if (!loadedEntries) { readEntries(); } sortEntries(); const dataBlock = []; const entryHeaders = []; let totalSize = 0; let dindex = 0; mainHeader.size = 0; mainHeader.offset = 0; const compress2Buffer = function (entryLists) { if (entryLists.length) { const entry = entryLists.pop(); const name = entry.entryName + entry.extra.toString(); if (onItemStart) onItemStart(name); entry.getCompressedDataAsync(function (compressedData) { if (onItemEnd) onItemEnd(name); entry.header.offset = dindex; // data header const dataHeader = entry.header.dataHeaderToBinary(); const postHeader = Buffer.alloc(name.length, name); const dataLength = dataHeader.length + postHeader.length + compressedData.length; dindex += dataLength; dataBlock.push(dataHeader); dataBlock.push(postHeader); dataBlock.push(compressedData); const entryHeader = entry.packHeader(); entryHeaders.push(entryHeader); mainHeader.size += entryHeader.length; totalSize += dataLength + entryHeader.length; compress2Buffer(entryLists); }); } else { totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length // point to end of data and beginning of central directory first record mainHeader.offset = dindex; dindex = 0; const outBuffer = Buffer.alloc(totalSize); dataBlock.forEach(function (content) { content.copy(outBuffer, dindex); // write data blocks dindex += content.length; }); entryHeaders.forEach(function (content) { content.copy(outBuffer, dindex); // write central directory entries dindex += content.length; }); const mh = mainHeader.toBinary(); if (_comment) { _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment } mh.copy(outBuffer, dindex); // write main header onSuccess(outBuffer); } }; compress2Buffer(entryList); } catch (e) { onFail(e); } } }; }; /***/ }), /***/ 4812: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = { parallel : __nccwpck_require__(8210), serial : __nccwpck_require__(445), serialOrdered : __nccwpck_require__(3578) }; /***/ }), /***/ 1700: /***/ ((module) => { // API module.exports = abort; /** * Aborts leftover active jobs * * @param {object} state - current state object */ function abort(state) { Object.keys(state.jobs).forEach(clean.bind(state)); // reset leftover jobs state.jobs = {}; } /** * Cleans up leftover job by invoking abort function for the provided job id * * @this state * @param {string|number} key - job id to abort */ function 
clean(key) { if (typeof this.jobs[key] == 'function') { this.jobs[key](); } } /***/ }), /***/ 2794: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var defer = __nccwpck_require__(8737); // API module.exports = async; /** * Runs provided callback asynchronously * even if callback itself is not * * @param {function} callback - callback to invoke * @returns {function} - augmented callback */ function async(callback) { var isAsync = false; // check if async happened defer(function() { isAsync = true; }); return function async_callback(err, result) { if (isAsync) { callback(err, result); } else { defer(function nextTick_callback() { callback(err, result); }); } }; } /***/ }), /***/ 8737: /***/ ((module) => { module.exports = defer; /** * Runs provided function on next iteration of the event loop * * @param {function} fn - function to run */ function defer(fn) { var nextTick = typeof setImmediate == 'function' ? setImmediate : ( typeof process == 'object' && typeof process.nextTick == 'function' ? process.nextTick : null ); if (nextTick) { nextTick(fn); } else { setTimeout(fn, 0); } } /***/ }), /***/ 9023: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var async = __nccwpck_require__(2794) , abort = __nccwpck_require__(1700) ; // API module.exports = iterate; /** * Iterates over each job object * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {object} state - current job status * @param {function} callback - invoked when all elements processed */ function iterate(list, iterator, state, callback) { // store current index var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; state.jobs[key] = runJob(iterator, key, list[key], function(error, output) { // don't repeat yourself // skip secondary callbacks if (!(key in state.jobs)) { return; } // clean up jobs delete state.jobs[key]; if (error) { // don't process rest of the results // stop still active jobs // and reset the list abort(state); } else { state.results[key] = output; } // return salvaged results callback(error, state.results); }); } /** * Runs iterator over provided job element * * @param {function} iterator - iterator to invoke * @param {string|number} key - key/index of the element in the list of jobs * @param {mixed} item - job description * @param {function} callback - invoked after iterator is done with the job * @returns {function|mixed} - job abort function or something else */ function runJob(iterator, key, item, callback) { var aborter; // allow shortcut if iterator expects only two arguments if (iterator.length == 2) { aborter = iterator(item, async(callback)); } // otherwise go with full three arguments else { aborter = iterator(item, key, async(callback)); } return aborter; } /***/ }), /***/ 2474: /***/ ((module) => { // API module.exports = state; /** * Creates initial state object * for iteration over list * * @param {array|object} list - list to iterate over * @param {function|null} sortMethod - function to use for keys sort, * or `null` to keep them as is * @returns {object} - initial state object */ function state(list, sortMethod) { var isNamedList = !Array.isArray(list) , initState = { index : 0, keyedList: isNamedList || sortMethod ? Object.keys(list) : null, jobs : {}, results : isNamedList ? {} : [], size : isNamedList ? 
Object.keys(list).length : list.length } ; if (sortMethod) { // sort array keys based on it's values // sort object's keys just on own merit initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) { return sortMethod(list[a], list[b]); }); } return initState; } /***/ }), /***/ 7942: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var abort = __nccwpck_require__(1700) , async = __nccwpck_require__(2794) ; // API module.exports = terminator; /** * Terminates jobs in the attached state context * * @this AsyncKitState# * @param {function} callback - final callback to invoke after termination */ function terminator(callback) { if (!Object.keys(this.jobs).length) { return; } // fast forward iteration index this.index = this.size; // abort jobs abort(this); // send back results we have so far async(callback)(null, this.results); } /***/ }), /***/ 8210: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var iterate = __nccwpck_require__(9023) , initState = __nccwpck_require__(2474) , terminator = __nccwpck_require__(7942) ; // Public API module.exports = parallel; /** * Runs iterator over provided array elements in parallel * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function parallel(list, iterator, callback) { var state = initState(list); while (state.index < (state['keyedList'] || list).length) { iterate(list, iterator, state, function(error, result) { if (error) { callback(error, result); return; } // looks like it's the last one if (Object.keys(state.jobs).length === 0) { callback(null, state.results); return; } }); state.index++; } return terminator.bind(state, callback); } /***/ }), /***/ 445: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var serialOrdered = __nccwpck_require__(3578); // Public API module.exports = serial; /** * Runs iterator over provided array elements in series * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function serial(list, iterator, callback) { return serialOrdered(list, iterator, null, callback); } /***/ }), /***/ 3578: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var iterate = __nccwpck_require__(9023) , initState = __nccwpck_require__(2474) , terminator = __nccwpck_require__(7942) ; // Public API module.exports = serialOrdered; // sorting helpers module.exports.ascending = ascending; module.exports.descending = descending; /** * Runs iterator over provided sorted array elements in series * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} sortMethod - custom sort function * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function serialOrdered(list, iterator, sortMethod, callback) { var state = initState(list, sortMethod); iterate(list, iterator, state, function iteratorHandler(error, result) { if (error) { callback(error, result); return; } state.index++; // are we there yet? 
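// if items remain, schedule the next job with the same handler so jobs run strictly one at a time; otherwise fall through and report the collected results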
if (state.index < (state['keyedList'] || list).length) { iterate(list, iterator, state, iteratorHandler); return; } // done here callback(null, state.results); }); return terminator.bind(state, callback); } /* * -- Sort methods */ /** * sort helper to sort array elements in ascending order * * @param {mixed} a - an item to compare * @param {mixed} b - an item to compare * @returns {number} - comparison result */ function ascending(a, b) { return a < b ? -1 : a > b ? 1 : 0; } /** * sort helper to sort array elements in descending order * * @param {mixed} a - an item to compare * @param {mixed} b - an item to compare * @returns {number} - comparison result */ function descending(a, b) { return -1 * ascending(a, b); } /***/ }), /***/ 3682: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var register = __nccwpck_require__(4670); var addHook = __nccwpck_require__(5549); var removeHook = __nccwpck_require__(6819); // bind with array of arguments: https://stackoverflow.com/a/21792913 var bind = Function.bind; var bindable = bind.bind(bind); function bindApi(hook, state, name) { var removeHookRef = bindable(removeHook, null).apply( null, name ? [state, name] : [state] ); hook.api = { remove: removeHookRef }; hook.remove = removeHookRef; ["before", "error", "after", "wrap"].forEach(function (kind) { var args = name ? [state, kind, name] : [state, kind]; hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); }); } function HookSingular() { var singularHookName = "h"; var singularHookState = { registry: {}, }; var singularHook = register.bind(null, singularHookState, singularHookName); bindApi(singularHook, singularHookState, singularHookName); return singularHook; } function HookCollection() { var state = { registry: {}, }; var hook = register.bind(null, state); bindApi(hook, state); return hook; } var collectionHookDeprecationMessageDisplayed = false; function Hook() { if (!collectionHookDeprecationMessageDisplayed) { console.warn( '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' ); collectionHookDeprecationMessageDisplayed = true; } return HookCollection(); } Hook.Singular = HookSingular.bind(); Hook.Collection = HookCollection.bind(); module.exports = Hook; // expose constructors as a named property for TypeScript module.exports.Hook = Hook; module.exports.Singular = Hook.Singular; module.exports.Collection = Hook.Collection; /***/ }), /***/ 5549: /***/ ((module) => { module.exports = addHook; function addHook(state, kind, name, hook) { var orig = hook; if (!state.registry[name]) { state.registry[name] = []; } if (kind === "before") { hook = function (method, options) { return Promise.resolve() .then(orig.bind(null, options)) .then(method.bind(null, options)); }; } if (kind === "after") { hook = function (method, options) { var result; return Promise.resolve() .then(method.bind(null, options)) .then(function (result_) { result = result_; return orig(result, options); }) .then(function () { return result; }); }; } if (kind === "error") { hook = function (method, options) { return Promise.resolve() .then(method.bind(null, options)) .catch(function (error) { return orig(error, options); }); }; } state.registry[name].push({ hook: hook, orig: orig, }); } /***/ }), /***/ 4670: /***/ ((module) => { module.exports = register; function register(state, name, method, options) { if (typeof method !== "function") { throw new Error("method for before hook must be a function"); } if (!options) { options = {}; } if (Array.isArray(name)) { return name.reverse().reduce(function (callback, name) { return register.bind(null, state, name, callback, options); }, method)(); } return Promise.resolve().then(function () { if (!state.registry[name]) { return method(options); } return state.registry[name].reduce(function (method, registered) { return registered.hook.bind(null, method, options); }, method)(); }); } /***/ }), /***/ 6819: /***/ ((module) => { module.exports = removeHook; function removeHook(state, name, method) { if (!state.registry[name]) { return; } var index = state.registry[name] .map(function (registered) { return registered.orig; }) .indexOf(method); if (index === -1) { return; } state.registry[name].splice(index, 1); } /***/ }), /***/ 3076: /***/ ((module) => { "use strict"; const encodings = new Set(['json', 'buffer', 'string']) module.exports = mkrequest => (...args) => { const statusCodes = new Set() let method let encoding let headers let baseurl = '' args.forEach(arg => { if (typeof arg === 'string') { if (arg.toUpperCase() === arg) { if (method) { const msg = `Can't set method to ${arg}, already set to ${method}.` throw new Error(msg) } else { method = arg } } else if (arg.startsWith('http:') || arg.startsWith('https:')) { baseurl = arg } else { if (encodings.has(arg)) { encoding = arg } else { throw new Error(`Unknown encoding, ${arg}`) } } } else if (typeof arg === 'number') { statusCodes.add(arg) } else if (typeof arg === 'object') { if (Array.isArray(arg) || arg instanceof Set) { arg.forEach(code => statusCodes.add(code)) } else { if (headers) { throw new Error('Cannot set headers twice.') } headers = arg } } else { throw new Error(`Unknown type: ${typeof arg}`) } }) if (!method) method = 'GET' if (statusCodes.size === 0) { statusCodes.add(200) } return mkrequest(statusCodes, method, encoding, headers, baseurl) } /***/ }), /***/ 3113: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const http = __nccwpck_require__(3685) const https = __nccwpck_require__(5687) const { 
URL } = __nccwpck_require__(7310) const isStream = __nccwpck_require__(1554) const caseless = __nccwpck_require__(5684) const bytes = __nccwpck_require__(5734) const bent = __nccwpck_require__(3076) const zlib = __nccwpck_require__(9796) const { PassThrough } = __nccwpck_require__(2781) const compression = {} /* istanbul ignore else */ if (zlib.createBrotliDecompress) compression.br = () => zlib.createBrotliDecompress() /* istanbul ignore else */ if (zlib.createGunzip) compression.gzip = () => zlib.createGunzip() /* istanbul ignore else */ if (zlib.createInflate) compression.deflate = () => zlib.createInflate() const acceptEncoding = Object.keys(compression).join(', ') const getResponse = resp => { const ret = new PassThrough() ret.statusCode = resp.statusCode ret.status = resp.statusCode ret.statusMessage = resp.statusMessage ret.headers = resp.headers ret._response = resp if (ret.headers['content-encoding']) { const encodings = ret.headers['content-encoding'].split(', ').reverse() while (encodings.length) { const enc = encodings.shift() if (compression[enc]) { const decompress = compression[enc]() decompress.on('error', (e) => ret.emit('error', new Error('ZBufError', e))) resp = resp.pipe(decompress) } else { break } } } return resp.pipe(ret) } class StatusError extends Error { constructor (res, ...params) { super(...params) Error.captureStackTrace(this, StatusError) this.name = 'StatusError' this.message = res.statusMessage this.statusCode = res.statusCode this.json = res.json this.text = res.text this.arrayBuffer = res.arrayBuffer this.headers = res.headers let buffer const get = () => { if (!buffer) buffer = this.arrayBuffer() return buffer } Object.defineProperty(this, 'responseBody', { get }) } } const getBuffer = stream => new Promise((resolve, reject) => { const parts = [] stream.on('error', reject) stream.on('end', () => resolve(Buffer.concat(parts))) stream.on('data', d => parts.push(d)) }) const decodings = res => { let _buffer res.arrayBuffer = () => { if (!_buffer) { _buffer = getBuffer(res) return _buffer } else { throw new Error('body stream is locked') } } res.text = () => res.arrayBuffer().then(buff => buff.toString()) res.json = async () => { const str = await res.text() try { return JSON.parse(str) } catch (e) { e.message += `str"${str}"` throw e } } } const mkrequest = (statusCodes, method, encoding, headers, baseurl) => (_url, body = null, _headers = {}) => { _url = baseurl + (_url || '') const parsed = new URL(_url) let h if (parsed.protocol === 'https:') { h = https } else if (parsed.protocol === 'http:') { h = http } else { throw new Error(`Unknown protocol, ${parsed.protocol}`) } const request = { path: parsed.pathname + parsed.search, port: parsed.port, method: method, headers: { ...(headers || {}), ..._headers }, hostname: parsed.hostname } if (parsed.username || parsed.password) { request.auth = [parsed.username, parsed.password].join(':') } const c = caseless(request.headers) if (encoding === 'json') { if (!c.get('accept')) { c.set('accept', 'application/json') } } if (!c.has('accept-encoding')) { c.set('accept-encoding', acceptEncoding) } return new Promise((resolve, reject) => { const req = h.request(request, async res => { res = getResponse(res) res.on('error', reject) decodings(res) res.status = res.statusCode if (!statusCodes.has(res.statusCode)) { return reject(new StatusError(res)) } if (!encoding) return resolve(res) else { /* istanbul ignore else */ if (encoding === 'buffer') { resolve(res.arrayBuffer()) } else if (encoding === 'json') { 
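// 'json' encoding was requested: consume the body and resolve with the parsed object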
resolve(res.json()) } else if (encoding === 'string') { resolve(res.text()) } } }) req.on('error', reject) if (body) { if (body instanceof ArrayBuffer || ArrayBuffer.isView(body)) { body = bytes.native(body) } if (Buffer.isBuffer(body)) { // noop } else if (typeof body === 'string') { body = Buffer.from(body) } else if (isStream(body)) { body.pipe(req) body = null } else if (typeof body === 'object') { if (!c.has('content-type')) { req.setHeader('content-type', 'application/json') } body = Buffer.from(JSON.stringify(body)) } else { reject(new Error('Unknown body type.')) } if (body) { req.setHeader('content-length', body.length) req.end(body) } } else { req.end() } }) } module.exports = bent(mkrequest) /***/ }), /***/ 610: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const stringify = __nccwpck_require__(8750); const compile = __nccwpck_require__(9434); const expand = __nccwpck_require__(5873); const parse = __nccwpck_require__(6477); /** * Expand the given pattern or create a regex-compatible string. * * ```js * const braces = require('braces'); * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] * ``` * @param {String} `str` * @param {Object} `options` * @return {String} * @api public */ const braces = (input, options = {}) => { let output = []; if (Array.isArray(input)) { for (let pattern of input) { let result = braces.create(pattern, options); if (Array.isArray(result)) { output.push(...result); } else { output.push(result); } } } else { output = [].concat(braces.create(input, options)); } if (options && options.expand === true && options.nodupes === true) { output = [...new Set(output)]; } return output; }; /** * Parse the given `str` with the given `options`. * * ```js * // braces.parse(pattern, [, options]); * const ast = braces.parse('a/{b,c}/d'); * console.log(ast); * ``` * @param {String} pattern Brace pattern to parse * @param {Object} options * @return {Object} Returns an AST * @api public */ braces.parse = (input, options = {}) => parse(input, options); /** * Creates a braces string from an AST, or an AST node. * * ```js * const braces = require('braces'); * let ast = braces.parse('foo/{a,b}/bar'); * console.log(stringify(ast.nodes[2])); //=> '{a,b}' * ``` * @param {String} `input` Brace pattern or AST. * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces.stringify = (input, options = {}) => { if (typeof input === 'string') { return stringify(braces.parse(input, options), options); } return stringify(input, options); }; /** * Compiles a brace pattern into a regex-compatible, optimized string. * This method is called by the main [braces](#braces) function by default. * * ```js * const braces = require('braces'); * console.log(braces.compile('a/{b,c}/d')); * //=> ['a/(b|c)/d'] * ``` * @param {String} `input` Brace pattern or AST. * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces.compile = (input, options = {}) => { if (typeof input === 'string') { input = braces.parse(input, options); } return compile(input, options); }; /** * Expands a brace pattern into an array. This method is called by the * main [braces](#braces) function when `options.expand` is true. Before * using this method it's recommended that you read the [performance notes](#performance)) * and advantages of using [.compile](#compile) instead. 
* * ```js * const braces = require('braces'); * console.log(braces.expand('a/{b,c}/d')); * //=> ['a/b/d', 'a/c/d']; * ``` * @param {String} `pattern` Brace pattern * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces.expand = (input, options = {}) => { if (typeof input === 'string') { input = braces.parse(input, options); } let result = expand(input, options); // filter out empty strings if specified if (options.noempty === true) { result = result.filter(Boolean); } // filter out duplicates if specified if (options.nodupes === true) { result = [...new Set(result)]; } return result; }; /** * Processes a brace pattern and returns either an expanded array * (if `options.expand` is true), a highly optimized regex-compatible string. * This method is called by the main [braces](#braces) function. * * ```js * const braces = require('braces'); * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' * ``` * @param {String} `pattern` Brace pattern * @param {Object} `options` * @return {Array} Returns an array of expanded values. * @api public */ braces.create = (input, options = {}) => { if (input === '' || input.length < 3) { return [input]; } return options.expand !== true ? braces.compile(input, options) : braces.expand(input, options); }; /** * Expose "braces" */ module.exports = braces; /***/ }), /***/ 9434: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fill = __nccwpck_require__(6330); const utils = __nccwpck_require__(5207); const compile = (ast, options = {}) => { let walk = (node, parent = {}) => { let invalidBlock = utils.isInvalidBrace(parent); let invalidNode = node.invalid === true && options.escapeInvalid === true; let invalid = invalidBlock === true || invalidNode === true; let prefix = options.escapeInvalid === true ? '\\' : ''; let output = ''; if (node.isOpen === true) { return prefix + node.value; } if (node.isClose === true) { return prefix + node.value; } if (node.type === 'open') { return invalid ? (prefix + node.value) : '('; } if (node.type === 'close') { return invalid ? (prefix + node.value) : ')'; } if (node.type === 'comma') { return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); } if (node.value) { return node.value; } if (node.nodes && node.ranges > 0) { let args = utils.reduce(node.nodes); let range = fill(...args, { ...options, wrap: false, toRegex: true }); if (range.length !== 0) { return args.length > 1 && range.length > 1 ? `(${range})` : range; } } if (node.nodes) { for (let child of node.nodes) { output += walk(child, node); } } return output; }; return walk(ast); }; module.exports = compile; /***/ }), /***/ 8774: /***/ ((module) => { "use strict"; module.exports = { MAX_LENGTH: 1024 * 64, // Digits CHAR_0: '0', /* 0 */ CHAR_9: '9', /* 9 */ // Alphabet chars. CHAR_UPPERCASE_A: 'A', /* A */ CHAR_LOWERCASE_A: 'a', /* a */ CHAR_UPPERCASE_Z: 'Z', /* Z */ CHAR_LOWERCASE_Z: 'z', /* z */ CHAR_LEFT_PARENTHESES: '(', /* ( */ CHAR_RIGHT_PARENTHESES: ')', /* ) */ CHAR_ASTERISK: '*', /* * */ // Non-alphabetic chars. CHAR_AMPERSAND: '&', /* & */ CHAR_AT: '@', /* @ */ CHAR_BACKSLASH: '\\', /* \ */ CHAR_BACKTICK: '`', /* ` */ CHAR_CARRIAGE_RETURN: '\r', /* \r */ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ CHAR_COLON: ':', /* : */ CHAR_COMMA: ',', /* , */ CHAR_DOLLAR: '$', /* . */ CHAR_DOT: '.', /* . */ CHAR_DOUBLE_QUOTE: '"', /* " */ CHAR_EQUAL: '=', /* = */ CHAR_EXCLAMATION_MARK: '!', /* ! 
*/ CHAR_FORM_FEED: '\f', /* \f */ CHAR_FORWARD_SLASH: '/', /* / */ CHAR_HASH: '#', /* # */ CHAR_HYPHEN_MINUS: '-', /* - */ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ CHAR_LEFT_CURLY_BRACE: '{', /* { */ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ CHAR_LINE_FEED: '\n', /* \n */ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ CHAR_PERCENT: '%', /* % */ CHAR_PLUS: '+', /* + */ CHAR_QUESTION_MARK: '?', /* ? */ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ CHAR_RIGHT_CURLY_BRACE: '}', /* } */ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ CHAR_SEMICOLON: ';', /* ; */ CHAR_SINGLE_QUOTE: '\'', /* ' */ CHAR_SPACE: ' ', /* */ CHAR_TAB: '\t', /* \t */ CHAR_UNDERSCORE: '_', /* _ */ CHAR_VERTICAL_LINE: '|', /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ }; /***/ }), /***/ 5873: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const fill = __nccwpck_require__(6330); const stringify = __nccwpck_require__(8750); const utils = __nccwpck_require__(5207); const append = (queue = '', stash = '', enclose = false) => { let result = []; queue = [].concat(queue); stash = [].concat(stash); if (!stash.length) return queue; if (!queue.length) { return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; } for (let item of queue) { if (Array.isArray(item)) { for (let value of item) { result.push(append(value, stash, enclose)); } } else { for (let ele of stash) { if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele)); } } } return utils.flatten(result); }; const expand = (ast, options = {}) => { let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit; let walk = (node, parent = {}) => { node.queue = []; let p = parent; let q = parent.queue; while (p.type !== 'brace' && p.type !== 'root' && p.parent) { p = p.parent; q = p.queue; } if (node.invalid || node.dollar) { q.push(append(q.pop(), stringify(node, options))); return; } if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { q.push(append(q.pop(), ['{}'])); return; } if (node.nodes && node.ranges > 0) { let args = utils.reduce(node.nodes); if (utils.exceedsLimit(...args, options.step, rangeLimit)) { throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); } let range = fill(...args, options); if (range.length === 0) { range = stringify(node, options); } q.push(append(q.pop(), range)); node.nodes = []; return; } let enclose = utils.encloseBrace(node); let queue = node.queue; let block = node; while (block.type !== 'brace' && block.type !== 'root' && block.parent) { block = block.parent; queue = block.queue; } for (let i = 0; i < node.nodes.length; i++) { let child = node.nodes[i]; if (child.type === 'comma' && node.type === 'brace') { if (i === 1) queue.push(''); queue.push(''); continue; } if (child.type === 'close') { q.push(append(q.pop(), queue, enclose)); continue; } if (child.value && child.type !== 'open') { queue.push(append(queue.pop(), child.value)); continue; } if (child.nodes) { walk(child, node); } } return queue; }; return utils.flatten(walk(ast)); }; module.exports = expand; /***/ }), /***/ 6477: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const stringify = __nccwpck_require__(8750); /** * Constants */ const { MAX_LENGTH, CHAR_BACKSLASH, /* \ */ CHAR_BACKTICK, /* ` */ CHAR_COMMA, /* , */ CHAR_DOT, /* . 
*/ CHAR_LEFT_PARENTHESES, /* ( */ CHAR_RIGHT_PARENTHESES, /* ) */ CHAR_LEFT_CURLY_BRACE, /* { */ CHAR_RIGHT_CURLY_BRACE, /* } */ CHAR_LEFT_SQUARE_BRACKET, /* [ */ CHAR_RIGHT_SQUARE_BRACKET, /* ] */ CHAR_DOUBLE_QUOTE, /* " */ CHAR_SINGLE_QUOTE, /* ' */ CHAR_NO_BREAK_SPACE, CHAR_ZERO_WIDTH_NOBREAK_SPACE } = __nccwpck_require__(8774); /** * parse */ const parse = (input, options = {}) => { if (typeof input !== 'string') { throw new TypeError('Expected a string'); } let opts = options || {}; let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; if (input.length > max) { throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); } let ast = { type: 'root', input, nodes: [] }; let stack = [ast]; let block = ast; let prev = ast; let brackets = 0; let length = input.length; let index = 0; let depth = 0; let value; let memo = {}; /** * Helpers */ const advance = () => input[index++]; const push = node => { if (node.type === 'text' && prev.type === 'dot') { prev.type = 'text'; } if (prev && prev.type === 'text' && node.type === 'text') { prev.value += node.value; return; } block.nodes.push(node); node.parent = block; node.prev = prev; prev = node; return node; }; push({ type: 'bos' }); while (index < length) { block = stack[stack.length - 1]; value = advance(); /** * Invalid chars */ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { continue; } /** * Escaped chars */ if (value === CHAR_BACKSLASH) { push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); continue; } /** * Right square bracket (literal): ']' */ if (value === CHAR_RIGHT_SQUARE_BRACKET) { push({ type: 'text', value: '\\' + value }); continue; } /** * Left square bracket: '[' */ if (value === CHAR_LEFT_SQUARE_BRACKET) { brackets++; let closed = true; let next; while (index < length && (next = advance())) { value += next; if (next === CHAR_LEFT_SQUARE_BRACKET) { brackets++; continue; } if (next === CHAR_BACKSLASH) { value += advance(); continue; } if (next === CHAR_RIGHT_SQUARE_BRACKET) { brackets--; if (brackets === 0) { break; } } } push({ type: 'text', value }); continue; } /** * Parentheses */ if (value === CHAR_LEFT_PARENTHESES) { block = push({ type: 'paren', nodes: [] }); stack.push(block); push({ type: 'text', value }); continue; } if (value === CHAR_RIGHT_PARENTHESES) { if (block.type !== 'paren') { push({ type: 'text', value }); continue; } block = stack.pop(); push({ type: 'text', value }); block = stack[stack.length - 1]; continue; } /** * Quotes: '|"|` */ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { let open = value; let next; if (options.keepQuotes !== true) { value = ''; } while (index < length && (next = advance())) { if (next === CHAR_BACKSLASH) { value += next + advance(); continue; } if (next === open) { if (options.keepQuotes === true) value += next; break; } value += next; } push({ type: 'text', value }); continue; } /** * Left curly brace: '{' */ if (value === CHAR_LEFT_CURLY_BRACE) { depth++; let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; let brace = { type: 'brace', open: true, close: false, dollar, depth, commas: 0, ranges: 0, nodes: [] }; block = push(brace); stack.push(block); push({ type: 'open', value }); continue; } /** * Right curly brace: '}' */ if (value === CHAR_RIGHT_CURLY_BRACE) { if (block.type !== 'brace') { push({ type: 'text', value }); continue; } let type = 'close'; block = stack.pop(); 
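// mark the popped brace block as closed, record the 'close' node, and resume parsing in the parent block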
block.close = true; push({ type, value }); depth--; block = stack[stack.length - 1]; continue; } /** * Comma: ',' */ if (value === CHAR_COMMA && depth > 0) { if (block.ranges > 0) { block.ranges = 0; let open = block.nodes.shift(); block.nodes = [open, { type: 'text', value: stringify(block) }]; } push({ type: 'comma', value }); block.commas++; continue; } /** * Dot: '.' */ if (value === CHAR_DOT && depth > 0 && block.commas === 0) { let siblings = block.nodes; if (depth === 0 || siblings.length === 0) { push({ type: 'text', value }); continue; } if (prev.type === 'dot') { block.range = []; prev.value += value; prev.type = 'range'; if (block.nodes.length !== 3 && block.nodes.length !== 5) { block.invalid = true; block.ranges = 0; prev.type = 'text'; continue; } block.ranges++; block.args = []; continue; } if (prev.type === 'range') { siblings.pop(); let before = siblings[siblings.length - 1]; before.value += prev.value + value; prev = before; block.ranges--; continue; } push({ type: 'dot', value }); continue; } /** * Text */ push({ type: 'text', value }); } // Mark imbalanced braces and brackets as invalid do { block = stack.pop(); if (block.type !== 'root') { block.nodes.forEach(node => { if (!node.nodes) { if (node.type === 'open') node.isOpen = true; if (node.type === 'close') node.isClose = true; if (!node.nodes) node.type = 'text'; node.invalid = true; } }); // get the location of the block on parent.nodes (block's siblings) let parent = stack[stack.length - 1]; let index = parent.nodes.indexOf(block); // replace the (invalid) block with it's nodes parent.nodes.splice(index, 1, ...block.nodes); } } while (stack.length > 0); push({ type: 'eos' }); return ast; }; module.exports = parse; /***/ }), /***/ 8750: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const utils = __nccwpck_require__(5207); module.exports = (ast, options = {}) => { let stringify = (node, parent = {}) => { let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); let invalidNode = node.invalid === true && options.escapeInvalid === true; let output = ''; if (node.value) { if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { return '\\' + node.value; } return node.value; } if (node.value) { return node.value; } if (node.nodes) { for (let child of node.nodes) { output += stringify(child); } } return output; }; return stringify(ast); }; /***/ }), /***/ 5207: /***/ ((__unused_webpack_module, exports) => { "use strict"; exports.isInteger = num => { if (typeof num === 'number') { return Number.isInteger(num); } if (typeof num === 'string' && num.trim() !== '') { return Number.isInteger(Number(num)); } return false; }; /** * Find a node of the given type */ exports.find = (node, type) => node.nodes.find(node => node.type === type); /** * Find a node of the given type */ exports.exceedsLimit = (min, max, step = 1, limit) => { if (limit === false) return false; if (!exports.isInteger(min) || !exports.isInteger(max)) return false; return ((Number(max) - Number(min)) / Number(step)) >= limit; }; /** * Escape the given node with '\\' before node.value */ exports.escapeNode = (block, n = 0, type) => { let node = block.nodes[n]; if (!node) return; if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { if (node.escaped !== true) { node.value = '\\' + node.value; node.escaped = true; } } }; /** * Returns true if the given brace node should be enclosed in literal braces */ exports.encloseBrace = node => { if (node.type !== 'brace') return 
false; if ((node.commas >> 0 + node.ranges >> 0) === 0) { node.invalid = true; return true; } return false; }; /** * Returns true if a brace node is invalid. */ exports.isInvalidBrace = block => { if (block.type !== 'brace') return false; if (block.invalid === true || block.dollar) return true; if ((block.commas >> 0 + block.ranges >> 0) === 0) { block.invalid = true; return true; } if (block.open !== true || block.close !== true) { block.invalid = true; return true; } return false; }; /** * Returns true if a node is an open or close node */ exports.isOpenOrClose = node => { if (node.type === 'open' || node.type === 'close') { return true; } return node.open === true || node.close === true; }; /** * Reduce an array of text nodes. */ exports.reduce = nodes => nodes.reduce((acc, node) => { if (node.type === 'text') acc.push(node.value); if (node.type === 'range') node.type = 'text'; return acc; }, []); /** * Flatten an array */ exports.flatten = (...args) => { const result = []; const flat = arr => { for (let i = 0; i < arr.length; i++) { let ele = arr[i]; Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele); } return result; }; flat(args); return result; }; /***/ }), /***/ 3089: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* globals atob, btoa, crypto */ /* istanbul ignore file */ const bytes = __nccwpck_require__(4391) bytes.from = (_from, _encoding) => { if (_from instanceof DataView) return _from if (_from instanceof ArrayBuffer) return new DataView(_from) let buffer if (typeof _from === 'string') { if (!_encoding) { _encoding = 'utf-8' } else if (_encoding === 'base64') { buffer = Uint8Array.from(atob(_from), c => c.charCodeAt(0)).buffer return new DataView(buffer) } if (_encoding !== 'utf-8') throw new Error('Browser support for encodings other than utf-8 not implemented') return new DataView((new TextEncoder()).encode(_from).buffer) } else if (typeof _from === 'object') { if (ArrayBuffer.isView(_from)) { if (_from.byteLength === _from.buffer.byteLength) return new DataView(_from.buffer) else return new DataView(_from.buffer, _from.byteOffset, _from.byteLength) } } throw new Error('Unkown type. 
Cannot convert to ArrayBuffer') } bytes.toString = (_from, encoding) => { _from = bytes(_from, encoding) const uint = new Uint8Array(_from.buffer, _from.byteOffset, _from.byteLength) const str = String.fromCharCode(...uint) if (encoding === 'base64') { /* would be nice to find a way to do this directly from a buffer * instead of doing two string conversions */ return btoa(str) } else { return str } } bytes.native = (_from, encoding) => { if (_from instanceof Uint8Array) return _from _from = bytes.from(_from, encoding) return new Uint8Array(_from.buffer, _from.byteOffset, _from.byteLength) } if (process.browser) bytes._randomFill = (...args) => crypto.getRandomValues(...args) module.exports = bytes /***/ }), /***/ 4391: /***/ ((module) => { "use strict"; const length = (a, b) => { if (a.byteLength === b.byteLength) return a.byteLength else if (a.byteLength > b.byteLength) return a.byteLength return b.byteLength } const bytes = (_from, encoding) => bytes.from(_from, encoding) bytes.sorter = (a, b) => { a = bytes(a) b = bytes(b) const len = length(a, b) let i = 0 while (i < (len - 1)) { if (i >= a.byteLength) return 1 else if (i >= b.byteLength) return -1 if (a.getUint8(i) < b.getUint8(i)) return -1 else if (a.getUint8(i) > b.getUint8(i)) return 1 i++ } return 0 } bytes.compare = (a, b) => !bytes.sorter(a, b) bytes.memcopy = (_from, encoding) => { const b = bytes(_from, encoding) return b.buffer.slice(b.byteOffset, b.byteOffset + b.byteLength) } bytes.arrayBuffer = (_from, encoding) => { _from = bytes(_from, encoding) if (_from.buffer.byteLength === _from.byteLength) return _from.buffer return _from.buffer.slice(_from.byteOffset, _from.byteOffset + _from.byteLength) } const sliceOptions = (_from, start = 0, end = null) => { _from = bytes(_from) end = (end === null ? 
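// when no explicit end is given, slice to the end of the view; subtracting start turns 'end' into a byte length for the DataView below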
_from.byteLength : end) - start return [_from.buffer, _from.byteOffset + start, end] } bytes.slice = (_from, start, end) => new DataView(...sliceOptions(_from, start, end)) bytes.memcopySlice = (_from, start, end) => { const [buffer, offset, length] = sliceOptions(_from, start, end) return buffer.slice(offset, length + offset) } bytes.typedArray = (_from, _Class = Uint8Array) => { _from = bytes(_from) return new _Class(_from.buffer, _from.byteOffset, _from.byteLength / _Class.BYTES_PER_ELEMENT) } bytes.concat = (_from) => { _from = Array.from(_from) _from = _from.map(b => bytes(b)) const length = _from.reduce((x, y) => x + y.byteLength, 0) const ret = new Uint8Array(length) let i = 0 for (const part of _from) { const view = bytes.typedArray(part) ret.set(view, i) i += view.byteLength } return ret.buffer } const maxEntropy = 65536 bytes.random = length => { const ab = new ArrayBuffer(length) if (length > maxEntropy) { let i = 0 while (i < ab.byteLength) { let len if (i + maxEntropy > ab.byteLength) len = ab.byteLength - i else len = maxEntropy const view = new Uint8Array(ab, i, len) i += maxEntropy bytes._randomFill(view) } } else { const view = new Uint8Array(ab) bytes._randomFill(view) } return ab } module.exports = bytes /***/ }),
/***/ 5734: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const crypto = __nccwpck_require__(6113) const fallback = (__nccwpck_require__(3089).from) const bytes = __nccwpck_require__(4391) bytes.from = (_from, encoding) => { if (_from instanceof DataView) return _from if (_from instanceof ArrayBuffer) return new DataView(_from) if (typeof _from === 'string') { _from = Buffer.from(_from, encoding) } if (Buffer.isBuffer(_from)) { return new DataView(_from.buffer, _from.byteOffset, _from.byteLength) } return fallback(_from, encoding) } bytes.toString = (_from, encoding) => { _from = bytes(_from) return Buffer.from(_from.buffer, _from.byteOffset, _from.byteLength).toString(encoding) } bytes.native = (_from, encoding) => { if (Buffer.isBuffer(_from)) return _from _from = bytes(_from, encoding) return Buffer.from(_from.buffer, _from.byteOffset, _from.byteLength) } bytes._randomFill = crypto.randomFillSync module.exports = bytes /***/ }),
/***/ 5684: /***/ ((module) => { function Caseless (dict) { this.dict = dict || {} } Caseless.prototype.set = function (name, value, clobber) { if (typeof name === 'object') { for (var i in name) { this.set(i, name[i], value) } } else { if (typeof clobber === 'undefined') clobber = true var has = this.has(name) if (!clobber && has) this.dict[has] = this.dict[has] + ',' + value else this.dict[has || name] = value return has } } Caseless.prototype.has = function (name) { var keys = Object.keys(this.dict) , name = name.toLowerCase() ; for (var i=0;i<keys.length;i++) { if (name === keys[i].toLowerCase()) return keys[i] } return false }
Caseless.prototype.get = function (name) { name = name.toLowerCase()
var result, _key
var headers = this.dict
Object.keys(headers).forEach(function (key) { _key = key.toLowerCase()
if (name === _key) result = headers[key] })
return result }
Caseless.prototype.swap = function (name) { var has = this.has(name)
if (has === name) return
if (!has) throw new Error('There is no header than matches "' + name + '"')
this.dict[name] = this.dict[has]
delete this.dict[has] }
Caseless.prototype.del = function (name) { var has = this.has(name)
return delete this.dict[has || name] }
module.exports = function (dict) { return new Caseless(dict) }
module.exports.httpify = function (resp, headers) { var c = new Caseless(headers)
resp.setHeader = function (key, value, clobber) { if (typeof value === 'undefined') return
return c.set(key, value, clobber) }
resp.hasHeader = function (key) { return c.has(key) }
resp.getHeader = function (key) { return c.get(key) }
resp.removeHeader = function (key) { return c.del(key) }
resp.headers = c.dict
return c } /***/ }),
/***/ 5443: /* combined-stream (module id assumed) */ /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var util = __nccwpck_require__(3837); var Stream = (__nccwpck_require__(2781).Stream); var DelayedStream = __nccwpck_require__(8611); module.exports = CombinedStream; function CombinedStream() { this.writable = false; this.readable = true; this.dataSize = 0; this.maxDataSize = 2 * 1024 * 1024; this.pauseStreams = true; this._released = false; this._streams = []; this._currentStream = null; this._insideLoop = false; this._pendingNext = false; } util.inherits(CombinedStream, Stream); CombinedStream.create = function(options) { var combinedStream = new this(); options = options || {}; for (var option in options) { combinedStream[option] = options[option]; } return combinedStream; }; CombinedStream.isStreamLike = function(stream) { return (typeof stream !==
'function') && (typeof stream !== 'string') && (typeof stream !== 'boolean') && (typeof stream !== 'number') && (!Buffer.isBuffer(stream)); }; CombinedStream.prototype.append = function(stream) { var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { if (!(stream instanceof DelayedStream)) { var newStream = DelayedStream.create(stream, { maxDataSize: Infinity, pauseStream: this.pauseStreams, }); stream.on('data', this._checkDataSize.bind(this)); stream = newStream; } this._handleErrors(stream); if (this.pauseStreams) { stream.pause(); } } this._streams.push(stream); return this; }; CombinedStream.prototype.pipe = function(dest, options) { Stream.prototype.pipe.call(this, dest, options); this.resume(); return dest; }; CombinedStream.prototype._getNext = function() { this._currentStream = null; if (this._insideLoop) { this._pendingNext = true; return; // defer call } this._insideLoop = true; try { do { this._pendingNext = false; this._realGetNext(); } while (this._pendingNext); } finally { this._insideLoop = false; } }; CombinedStream.prototype._realGetNext = function() { var stream = this._streams.shift(); if (typeof stream == 'undefined') { this.end(); return; } if (typeof stream !== 'function') { this._pipeNext(stream); return; } var getStream = stream; getStream(function(stream) { var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { stream.on('data', this._checkDataSize.bind(this)); this._handleErrors(stream); } this._pipeNext(stream); }.bind(this)); }; CombinedStream.prototype._pipeNext = function(stream) { this._currentStream = stream; var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { stream.on('end', this._getNext.bind(this)); stream.pipe(this, {end: false}); return; } var value = stream; this.write(value); this._getNext(); }; CombinedStream.prototype._handleErrors = function(stream) { var self = this; stream.on('error', function(err) { self._emitError(err); }); }; CombinedStream.prototype.write = function(data) { this.emit('data', data); }; CombinedStream.prototype.pause = function() { if (!this.pauseStreams) { return; } if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); this.emit('pause'); }; CombinedStream.prototype.resume = function() { if (!this._released) { this._released = true; this.writable = true; this._getNext(); } if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); this.emit('resume'); }; CombinedStream.prototype.end = function() { this._reset(); this.emit('end'); }; CombinedStream.prototype.destroy = function() { this._reset(); this.emit('close'); }; CombinedStream.prototype._reset = function() { this.writable = false; this._streams = []; this._currentStream = null; }; CombinedStream.prototype._checkDataSize = function() { this._updateDataSize(); if (this.dataSize <= this.maxDataSize) { return; } var message = 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; this._emitError(new Error(message)); }; CombinedStream.prototype._updateDataSize = function() { this.dataSize = 0; var self = this; this._streams.forEach(function(stream) { if (!stream.dataSize) { return; } self.dataSize += stream.dataSize; }); if (this._currentStream && this._currentStream.dataSize) { this.dataSize += this._currentStream.dataSize; } }; CombinedStream.prototype._emitError = function(err) { this._reset(); this.emit('error', err); }; /***/ }), /***/ 8222: /***/ 
((module, exports, __nccwpck_require__) => { /* eslint-env browser */ /** * This is the web browser implementation of `debug()`. */ exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.storage = localstorage(); exports.destroy = (() => { let warned = false; return () => { if (!warned) { warned = true; console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); } }; })(); /** * Colors. */ exports.colors = [ '#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33' ]; /** * Currently only WebKit-based Web Inspectors, Firefox >= v31, * and the Firebug extension (any Firefox version) are known * to support "%c" CSS customizations. * * TODO: add a `localStorage` variable to explicitly enable/disable colors */ // eslint-disable-next-line complexity function useColors() { // NB: In an Electron preload script, document will be defined but not fully // initialized. Since we know we're in Chrome, we'll just detect this case // explicitly if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { return true; } // Internet Explorer and Edge do not support colors. if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { return false; } // Is webkit? http://stackoverflow.com/a/16459606/376773 // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || // Is firebug? http://stackoverflow.com/a/398120/376773 (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || // Is firefox >= v31? // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || // Double check webkit in userAgent just in case we are in a worker (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); } /** * Colorize log arguments if enabled. * * @api public */ function formatArgs(args) { args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? 
'%c ' : ' ') + '+' + module.exports.humanize(this.diff); if (!this.useColors) { return; } const c = 'color: ' + this.color; args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other // arguments passed either before or after the %c, so we need to // figure out the correct index to insert the CSS into let index = 0; let lastC = 0; args[0].replace(/%[a-zA-Z%]/g, match => { if (match === '%%') { return; } index++; if (match === '%c') { // We only are interested in the *last* %c // (the user may have provided their own) lastC = index; } }); args.splice(lastC, 0, c); } /** * Invokes `console.debug()` when available. * No-op when `console.debug` is not a "function". * If `console.debug` is not available, falls back * to `console.log`. * * @api public */ exports.log = console.debug || console.log || (() => {}); /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { try { if (namespaces) { exports.storage.setItem('debug', namespaces); } else { exports.storage.removeItem('debug'); } } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { let r; try { r = exports.storage.getItem('debug'); } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } // If debug isn't set in LS, and we're in Electron, try to load $DEBUG if (!r && typeof process !== 'undefined' && 'env' in process) { r = process.env.DEBUG; } return r; } /** * Localstorage attempts to return the localstorage. * * This is necessary because safari throws * when a user disables cookies/localstorage * and you attempt to access it. * * @return {LocalStorage} * @api private */ function localstorage() { try { // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context // The Browser also has localStorage in the global context. return localStorage; } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } module.exports = __nccwpck_require__(6243)(exports); const {formatters} = module.exports; /** * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. */ formatters.j = function (v) { try { return JSON.stringify(v); } catch (error) { return '[UnexpectedJSONParseError]: ' + error.message; } }; /***/ }), /***/ 6243: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** * This is the common logic for both the Node.js and web browser * implementations of `debug()`. */ function setup(env) { createDebug.debug = createDebug; createDebug.default = createDebug; createDebug.coerce = coerce; createDebug.disable = disable; createDebug.enable = enable; createDebug.enabled = enabled; createDebug.humanize = __nccwpck_require__(900); createDebug.destroy = destroy; Object.keys(env).forEach(key => { createDebug[key] = env[key]; }); /** * The currently active debug mode names, and names to skip. */ createDebug.names = []; createDebug.skips = []; /** * Map of special "%n" handling functions, for the debug "format" argument. * * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
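 * For example, the browser build later in this bundle registers formatters.j for "%j", and the Node.js build registers formatters.o and formatters.O for "%o" and "%O".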
*/ createDebug.formatters = {}; /** * Selects a color for a debug namespace * @param {String} namespace The namespace string for the debug instance to be colored * @return {Number|String} An ANSI color code for the given namespace * @api private */ function selectColor(namespace) { let hash = 0; for (let i = 0; i < namespace.length; i++) { hash = ((hash << 5) - hash) + namespace.charCodeAt(i); hash |= 0; // Convert to 32bit integer } return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; } createDebug.selectColor = selectColor; /** * Create a debugger with the given `namespace`. * * @param {String} namespace * @return {Function} * @api public */ function createDebug(namespace) { let prevTime; let enableOverride = null; let namespacesCache; let enabledCache; function debug(...args) { // Disabled? if (!debug.enabled) { return; } const self = debug; // Set `diff` timestamp const curr = Number(new Date()); const ms = curr - (prevTime || curr); self.diff = ms; self.prev = prevTime; self.curr = curr; prevTime = curr; args[0] = createDebug.coerce(args[0]); if (typeof args[0] !== 'string') { // Anything else let's inspect with %O args.unshift('%O'); } // Apply any `formatters` transformations let index = 0; args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { // If we encounter an escaped % then don't increase the array index if (match === '%%') { return '%'; } index++; const formatter = createDebug.formatters[format]; if (typeof formatter === 'function') { const val = args[index]; match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` args.splice(index, 1); index--; } return match; }); // Apply env-specific formatting (colors, etc.) createDebug.formatArgs.call(self, args); const logFn = self.log || createDebug.log; logFn.apply(self, args); } debug.namespace = namespace; debug.useColors = createDebug.useColors(); debug.color = createDebug.selectColor(namespace); debug.extend = extend; debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. Object.defineProperty(debug, 'enabled', { enumerable: true, configurable: false, get: () => { if (enableOverride !== null) { return enableOverride; } if (namespacesCache !== createDebug.namespaces) { namespacesCache = createDebug.namespaces; enabledCache = createDebug.enabled(namespace); } return enabledCache; }, set: v => { enableOverride = v; } }); // Env-specific initialization logic for debug instances if (typeof createDebug.init === 'function') { createDebug.init(debug); } return debug; } function extend(namespace, delimiter) { const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); newDebug.log = this.log; return newDebug; } /** * Enables a debug mode by namespaces. This can include modes * separated by a colon and wildcards. * * @param {String} namespaces * @api public */ function enable(namespaces) { createDebug.save(namespaces); createDebug.namespaces = namespaces; createDebug.names = []; createDebug.skips = []; let i; const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); const len = split.length; for (i = 0; i < len; i++) { if (!split[i]) { // ignore empty strings continue; } namespaces = split[i].replace(/\*/g, '.*?'); if (namespaces[0] === '-') { createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); } else { createDebug.names.push(new RegExp('^' + namespaces + '$')); } } } /** * Disable debug output. 
* * @return {String} namespaces * @api public */ function disable() { const namespaces = [ ...createDebug.names.map(toNamespace), ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) ].join(','); createDebug.enable(''); return namespaces; } /** * Returns true if the given mode name is enabled, false otherwise. * * @param {String} name * @return {Boolean} * @api public */ function enabled(name) { if (name[name.length - 1] === '*') { return true; } let i; let len; for (i = 0, len = createDebug.skips.length; i < len; i++) { if (createDebug.skips[i].test(name)) { return false; } } for (i = 0, len = createDebug.names.length; i < len; i++) { if (createDebug.names[i].test(name)) { return true; } } return false; } /** * Convert regexp to namespace * * @param {RegExp} regxep * @return {String} namespace * @api private */ function toNamespace(regexp) { return regexp.toString() .substring(2, regexp.toString().length - 2) .replace(/\.\*\?$/, '*'); } /** * Coerce `val`. * * @param {Mixed} val * @return {Mixed} * @api private */ function coerce(val) { if (val instanceof Error) { return val.stack || val.message; } return val; } /** * XXX DO NOT USE. This is a temporary stub function. * XXX It WILL be removed in the next major release. */ function destroy() { console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); } createDebug.enable(createDebug.load()); return createDebug; } module.exports = setup; /***/ }), /***/ 8237: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** * Detect Electron renderer / nwjs process, which is node, but we should * treat as a browser. */ if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { module.exports = __nccwpck_require__(8222); } else { module.exports = __nccwpck_require__(4874); } /***/ }), /***/ 4874: /***/ ((module, exports, __nccwpck_require__) => { /** * Module dependencies. */ const tty = __nccwpck_require__(6224); const util = __nccwpck_require__(3837); /** * This is the Node.js implementation of `debug()`. */ exports.init = init; exports.log = log; exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.destroy = util.deprecate( () => {}, 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' ); /** * Colors. */ exports.colors = [6, 2, 3, 4, 5, 1]; try { // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) // eslint-disable-next-line import/no-extraneous-dependencies const supportsColor = __nccwpck_require__(9318); if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { exports.colors = [ 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221 ]; } } catch (error) { // Swallow - we only care if `supports-color` is available; it doesn't have to be. } /** * Build up the default `inspectOpts` object from the environment variables. 
* * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js */ exports.inspectOpts = Object.keys(process.env).filter(key => { return /^debug_/i.test(key); }).reduce((obj, key) => { // Camel-case const prop = key .substring(6) .toLowerCase() .replace(/_([a-z])/g, (_, k) => { return k.toUpperCase(); }); // Coerce string value into JS value let val = process.env[key]; if (/^(yes|on|true|enabled)$/i.test(val)) { val = true; } else if (/^(no|off|false|disabled)$/i.test(val)) { val = false; } else if (val === 'null') { val = null; } else { val = Number(val); } obj[prop] = val; return obj; }, {}); /** * Is stdout a TTY? Colored output is enabled when `true`. */ function useColors() { return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); } /** * Adds ANSI color escape codes if enabled. * * @api public */ function formatArgs(args) { const {namespace: name, useColors} = this; if (useColors) { const c = this.color; const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); const prefix = ` ${colorCode};1m${name} \u001B[0m`; args[0] = prefix + args[0].split('\n').join('\n' + prefix); args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); } else { args[0] = getDate() + name + ' ' + args[0]; } } function getDate() { if (exports.inspectOpts.hideDate) { return ''; } return new Date().toISOString() + ' '; } /** * Invokes `util.format()` with the specified arguments and writes to stderr. */ function log(...args) { return process.stderr.write(util.format(...args) + '\n'); } /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { if (namespaces) { process.env.DEBUG = namespaces; } else { // If you set a process.env field to null or undefined, it gets cast to the // string 'null' or 'undefined'. Just delete instead. delete process.env.DEBUG; } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { return process.env.DEBUG; } /** * Init logic for `debug` instances. * * Create a new `inspectOpts` object in case `useColors` is set * differently for a particular `debug` instance. */ function init(debug) { debug.inspectOpts = {}; const keys = Object.keys(exports.inspectOpts); for (let i = 0; i < keys.length; i++) { debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; } } module.exports = __nccwpck_require__(6243)(exports); const {formatters} = module.exports; /** * Map %o to `util.inspect()`, all on a single line. */ formatters.o = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts) .split('\n') .map(str => str.trim()) .join(' '); }; /** * Map %O to `util.inspect()`, allowing multiple lines if needed. */ formatters.O = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts); }; /***/ }), /***/ 2391: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {Transform, PassThrough} = __nccwpck_require__(2781); const zlib = __nccwpck_require__(9796); const mimicResponse = __nccwpck_require__(3877); module.exports = response => { const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { return response; } // TODO: Remove this when targeting Node.js 12. 
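// brotli ('br') decoding needs zlib.createBrotliDecompress; runtimes that lack it get the response destroyed with the error below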
const isBrotli = contentEncoding === 'br'; if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { response.destroy(new Error('Brotli is not supported on Node.js < 12')); return response; } let isEmpty = true; const checker = new Transform({ transform(data, _encoding, callback) { isEmpty = false; callback(null, data); }, flush(callback) { callback(); } }); const finalStream = new PassThrough({ autoDestroy: false, destroy(error, callback) { response.destroy(); callback(error); } }); const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); decompressStream.once('error', error => { if (isEmpty && !response.readable) { finalStream.end(); return; } finalStream.destroy(error); }); mimicResponse(response, finalStream); response.pipe(checker).pipe(decompressStream).pipe(finalStream); return finalStream; }; /***/ }), /***/ 3877: /***/ ((module) => { "use strict"; // We define these manually to ensure they're always copied // even if they would move up the prototype chain // https://nodejs.org/api/http.html#http_class_http_incomingmessage const knownProperties = [ 'aborted', 'complete', 'headers', 'httpVersion', 'httpVersionMinor', 'httpVersionMajor', 'method', 'rawHeaders', 'rawTrailers', 'setTimeout', 'socket', 'statusCode', 'statusMessage', 'trailers', 'url' ]; module.exports = (fromStream, toStream) => { if (toStream._readableState.autoDestroy) { throw new Error('The second stream must have the `autoDestroy` option set to `false`'); } const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); const properties = {}; for (const property of fromProperties) { // Don't overwrite existing properties. if (property in toStream) { continue; } properties[property] = { get() { const value = fromStream[property]; const isFunction = typeof value === 'function'; return isFunction ? 
value.bind(fromStream) : value; }, set(value) { fromStream[property] = value; }, enumerable: true, configurable: false }; } Object.defineProperties(toStream, properties); fromStream.once('aborted', () => { toStream.destroy(); toStream.emit('aborted'); }); fromStream.once('close', () => { if (fromStream.complete) { if (toStream.readable) { toStream.once('end', () => { toStream.emit('close'); }); } else { toStream.emit('close'); } } else { toStream.emit('close'); } }); return toStream; }; /***/ }), /***/ 6214: /***/ ((module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function isTLSSocket(socket) { return socket.encrypted; } const deferToConnect = (socket, fn) => { let listeners; if (typeof fn === 'function') { const connect = fn; listeners = { connect }; } else { listeners = fn; } const hasConnectListener = typeof listeners.connect === 'function'; const hasSecureConnectListener = typeof listeners.secureConnect === 'function'; const hasCloseListener = typeof listeners.close === 'function'; const onConnect = () => { if (hasConnectListener) { listeners.connect(); } if (isTLSSocket(socket) && hasSecureConnectListener) { if (socket.authorized) { listeners.secureConnect(); } else if (!socket.authorizationError) { socket.once('secureConnect', listeners.secureConnect); } } if (hasCloseListener) { socket.once('close', listeners.close); } }; if (socket.writable && !socket.connecting) { onConnect(); } else if (socket.connecting) { socket.once('connect', onConnect); } else if (socket.destroyed && hasCloseListener) { listeners.close(socket._hadError); } }; exports["default"] = deferToConnect; // For CommonJS default export support module.exports = deferToConnect; module.exports["default"] = deferToConnect; /***/ }), /***/ 8611: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = (__nccwpck_require__(2781).Stream); var util = __nccwpck_require__(3837); module.exports = DelayedStream; function DelayedStream() { this.source = null; this.dataSize = 0; this.maxDataSize = 1024 * 1024; this.pauseStream = true; this._maxDataSizeExceeded = false; this._released = false; this._bufferedEvents = []; } util.inherits(DelayedStream, Stream); DelayedStream.create = function(source, options) { var delayedStream = new this(); options = options || {}; for (var option in options) { delayedStream[option] = options[option]; } delayedStream.source = source; var realEmit = source.emit; source.emit = function() { delayedStream._handleEmit(arguments); return realEmit.apply(source, arguments); }; source.on('error', function() {}); if (delayedStream.pauseStream) { source.pause(); } return delayedStream; }; Object.defineProperty(DelayedStream.prototype, 'readable', { configurable: true, enumerable: true, get: function() { return this.source.readable; } }); DelayedStream.prototype.setEncoding = function() { return this.source.setEncoding.apply(this.source, arguments); }; DelayedStream.prototype.resume = function() { if (!this._released) { this.release(); } this.source.resume(); }; DelayedStream.prototype.pause = function() { this.source.pause(); }; DelayedStream.prototype.release = function() { this._released = true; this._bufferedEvents.forEach(function(args) { this.emit.apply(this, args); }.bind(this)); this._bufferedEvents = []; }; DelayedStream.prototype.pipe = function() { var r = Stream.prototype.pipe.apply(this, arguments); this.resume(); return r; }; DelayedStream.prototype._handleEmit = function(args) { if (this._released) { this.emit.apply(this, 
args); return; } if (args[0] === 'data') { this.dataSize += args[1].length; this._checkIfMaxDataSizeExceeded(); } this._bufferedEvents.push(args); }; DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { if (this._maxDataSizeExceeded) { return; } if (this.dataSize <= this.maxDataSize) { return; } this._maxDataSizeExceeded = true; var message = 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' this.emit('error', new Error(message)); }; /***/ }), /***/ 8932: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); class Deprecation extends Error { constructor(message) { super(message); // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = 'Deprecation'; } } exports.Deprecation = Deprecation; /***/ }), /***/ 4460: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var isGlob = __nccwpck_require__(4466); var pathPosixDirname = (__nccwpck_require__(1017).posix.dirname); var isWin32 = (__nccwpck_require__(2037).platform)() === 'win32'; var slash = '/'; var backslash = /\\/g; var enclosure = /[\{\[].*[\}\]]$/; var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; /** * @param {string} str * @param {Object} opts * @param {boolean} [opts.flipBackslashes=true] * @returns {string} */ module.exports = function globParent(str, opts) { var options = Object.assign({ flipBackslashes: true }, opts); // flip windows path separators if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { str = str.replace(backslash, slash); } // special case for strings ending in enclosure containing path separator if (enclosure.test(str)) { str += slash; } // preserves full path in case of trailing path separator str += 'a'; // remove path parts that are globby do { str = pathPosixDirname(str); } while (isGlob(str) || globby.test(str)); // remove escape chars and return result return str.replace(escaped, '$1'); }; /***/ }), /***/ 3664: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const taskManager = __nccwpck_require__(2708); const async_1 = __nccwpck_require__(5679); const stream_1 = __nccwpck_require__(4630); const sync_1 = __nccwpck_require__(2405); const settings_1 = __nccwpck_require__(952); const utils = __nccwpck_require__(8223); async function FastGlob(source, options) { assertPatternsInput(source); const works = getWorks(source, async_1.default, options); const result = await Promise.all(works); return utils.array.flatten(result); } // https://github.com/typescript-eslint/typescript-eslint/issues/60 // eslint-disable-next-line no-redeclare (function (FastGlob) { FastGlob.glob = FastGlob; FastGlob.globSync = sync; FastGlob.globStream = stream; FastGlob.async = FastGlob; function sync(source, options) { assertPatternsInput(source); const works = getWorks(source, sync_1.default, options); return utils.array.flatten(works); } FastGlob.sync = sync; function stream(source, options) { assertPatternsInput(source); const works = getWorks(source, stream_1.default, options); /** * The stream returned by the provider cannot work with an asynchronous iterator. * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. * This affects performance (+25%). I don't see best solution right now. 
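 *
 * Illustrative usage of the merged stream (assuming the usual fast-glob stream API;
 * not part of the bundled source). The merged stream is a plain object-mode Node.js
 * stream, so it can be consumed with an asynchronous iterator:
 *
 *   const fg = require('fast-glob');
 *   for await (const entry of fg.stream(['src/**'], { dot: true })) {
 *     console.log(entry); // paths are emitted as the underlying tasks produce them
 *   }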
*/ return utils.stream.merge(works); } FastGlob.stream = stream; function generateTasks(source, options) { assertPatternsInput(source); const patterns = [].concat(source); const settings = new settings_1.default(options); return taskManager.generate(patterns, settings); } FastGlob.generateTasks = generateTasks; function isDynamicPattern(source, options) { assertPatternsInput(source); const settings = new settings_1.default(options); return utils.pattern.isDynamicPattern(source, settings); } FastGlob.isDynamicPattern = isDynamicPattern; function escapePath(source) { assertPatternsInput(source); return utils.path.escape(source); } FastGlob.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertPathToPattern(source); } FastGlob.convertPathToPattern = convertPathToPattern; let posix; (function (posix) { function escapePath(source) { assertPatternsInput(source); return utils.path.escapePosixPath(source); } posix.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertPosixPathToPattern(source); } posix.convertPathToPattern = convertPathToPattern; })(posix = FastGlob.posix || (FastGlob.posix = {})); let win32; (function (win32) { function escapePath(source) { assertPatternsInput(source); return utils.path.escapeWindowsPath(source); } win32.escapePath = escapePath; function convertPathToPattern(source) { assertPatternsInput(source); return utils.path.convertWindowsPathToPattern(source); } win32.convertPathToPattern = convertPathToPattern; })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); })(FastGlob || (FastGlob = {})); function getWorks(source, _Provider, options) { const patterns = [].concat(source); const settings = new settings_1.default(options); const tasks = taskManager.generate(patterns, settings); const provider = new _Provider(settings); return tasks.map(provider.read, provider); } function assertPatternsInput(input) { const source = [].concat(input); const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); if (!isValidSource) { throw new TypeError('Patterns must be a string (non empty) or an array of strings'); } } module.exports = FastGlob; /***/ }), /***/ 2708: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; const utils = __nccwpck_require__(8223); function generate(input, settings) { const patterns = processPatterns(input, settings); const ignore = processPatterns(settings.ignore, settings); const positivePatterns = getPositivePatterns(patterns); const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); return staticTasks.concat(dynamicTasks); } exports.generate = generate; function processPatterns(input, settings) { let patterns = input; 
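/*
 * Illustrative sketch (editorial, not from the bundled source): roughly what the task
 * generation above produces for a simple positive/negative pattern pair. Exact shapes
 * depend on the Settings defaults.
 *
 *   const fg = require('fast-glob');
 *   fg.generateTasks(['src/**', '!src/*.test.js']);
 *   // ~> [{ dynamic: true, base: 'src',
 *   //       positive: ['src/**'],
 *   //       negative: ['src/*.test.js'],
 *   //       patterns: ['src/**', '!src/*.test.js'] }]
 */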
/** * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry * and some problems with the micromatch package (see fast-glob issues: #365, #394). * * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown * in matching in the case of a large set of patterns after expansion. */ if (settings.braceExpansion) { patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); } /** * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used * at any nesting level. * * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change * the pattern in the filter before creating a regular expression. There is no need to change the patterns * in the application. Only on the input. */ if (settings.baseNameMatch) { patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); } /** * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. */ return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); } /** * Returns tasks grouped by basic pattern directories. * * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. * This is necessary because directory traversal starts at the base directory and goes deeper. */ function convertPatternsToTasks(positive, negative, dynamic) { const tasks = []; const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); /* * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. */ if ('.' 
in insideCurrentDirectoryGroup) { tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); } else { tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); } return tasks; } exports.convertPatternsToTasks = convertPatternsToTasks; function getPositivePatterns(patterns) { return utils.pattern.getPositivePatterns(patterns); } exports.getPositivePatterns = getPositivePatterns; function getNegativePatternsAsPositive(patterns, ignore) { const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); const positive = negative.map(utils.pattern.convertToPositivePattern); return positive; } exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; function groupPatternsByBaseDirectory(patterns) { const group = {}; return patterns.reduce((collection, pattern) => { const base = utils.pattern.getBaseDirectory(pattern); if (base in collection) { collection[base].push(pattern); } else { collection[base] = [pattern]; } return collection; }, group); } exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; function convertPatternGroupsToTasks(positive, negative, dynamic) { return Object.keys(positive).map((base) => { return convertPatternGroupToTask(base, positive[base], negative, dynamic); }); } exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; function convertPatternGroupToTask(base, positive, negative, dynamic) { return { dynamic, positive, negative, base, patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) }; } exports.convertPatternGroupToTask = convertPatternGroupToTask; /***/ }), /***/ 5679: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const async_1 = __nccwpck_require__(7747); const provider_1 = __nccwpck_require__(257); class ProviderAsync extends provider_1.default { constructor() { super(...arguments); this._reader = new async_1.default(this._settings); } async read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const entries = await this.api(root, task, options); return entries.map((entry) => options.transform(entry)); } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } exports["default"] = ProviderAsync; /***/ }), /***/ 6983: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const utils = __nccwpck_require__(8223); const partial_1 = __nccwpck_require__(5295); class DeepFilter { constructor(_settings, _micromatchOptions) { this._settings = _settings; this._micromatchOptions = _micromatchOptions; } getFilter(basePath, positive, negative) { const matcher = this._getMatcher(positive); const negativeRe = this._getNegativePatternsRe(negative); return (entry) => this._filter(basePath, entry, matcher, negativeRe); } _getMatcher(patterns) { return new partial_1.default(patterns, this._settings, this._micromatchOptions); } _getNegativePatternsRe(patterns) { const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); } _filter(basePath, entry, matcher, negativeRe) { if (this._isSkippedByDeep(basePath, entry.path)) { return false; } if 
(this._isSkippedSymbolicLink(entry)) { return false; } const filepath = utils.path.removeLeadingDotSegment(entry.path); if (this._isSkippedByPositivePatterns(filepath, matcher)) { return false; } return this._isSkippedByNegativePatterns(filepath, negativeRe); } _isSkippedByDeep(basePath, entryPath) { /** * Avoid unnecessary depth calculations when it doesn't matter. */ if (this._settings.deep === Infinity) { return false; } return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; } _getEntryLevel(basePath, entryPath) { const entryPathDepth = entryPath.split('/').length; if (basePath === '') { return entryPathDepth; } const basePathDepth = basePath.split('/').length; return entryPathDepth - basePathDepth; } _isSkippedSymbolicLink(entry) { return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); } _isSkippedByPositivePatterns(entryPath, matcher) { return !this._settings.baseNameMatch && !matcher.match(entryPath); } _isSkippedByNegativePatterns(entryPath, patternsRe) { return !utils.pattern.matchAny(entryPath, patternsRe); } } exports["default"] = DeepFilter; /***/ }), /***/ 1343: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const utils = __nccwpck_require__(8223); class EntryFilter { constructor(_settings, _micromatchOptions) { this._settings = _settings; this._micromatchOptions = _micromatchOptions; this.index = new Map(); } getFilter(positive, negative) { const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); const negativeRe = utils.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })); return (entry) => this._filter(entry, positiveRe, negativeRe); } _filter(entry, positiveRe, negativeRe) { const filepath = utils.path.removeLeadingDotSegment(entry.path); if (this._settings.unique && this._isDuplicateEntry(filepath)) { return false; } if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { return false; } if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) { return false; } const isDirectory = entry.dirent.isDirectory(); const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory); if (this._settings.unique && isMatched) { this._createIndexRecord(filepath); } return isMatched; } _isDuplicateEntry(filepath) { return this.index.has(filepath); } _createIndexRecord(filepath) { this.index.set(filepath, undefined); } _onlyFileFilter(entry) { return this._settings.onlyFiles && !entry.dirent.isFile(); } _onlyDirectoryFilter(entry) { return this._settings.onlyDirectories && !entry.dirent.isDirectory(); } _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { if (!this._settings.absolute) { return false; } const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); return utils.pattern.matchAny(fullpath, patternsRe); } _isMatchToPatterns(filepath, patternsRe, isDirectory) { // Trying to match files and directories by patterns. const isMatched = utils.pattern.matchAny(filepath, patternsRe); // A pattern with a trailing slash can be used for directory matching. // To apply such a pattern, we need to add a trailing slash to the path. 
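// Worked example (editorial note, not from the source): given a directory entry whose
// path is 'packages/app' and a pattern written as 'packages/app/', the matchAny() call
// above can miss because the entry path carries no trailing slash; the retry below
// tests 'packages/app/' so that such directory-only patterns still get a chance to match.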
if (!isMatched && isDirectory) { return utils.pattern.matchAny(filepath + '/', patternsRe); } return isMatched; } } exports["default"] = EntryFilter; /***/ }), /***/ 6654: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const utils = __nccwpck_require__(8223); class ErrorFilter { constructor(_settings) { this._settings = _settings; } getFilter() { return (error) => this._isNonFatalError(error); } _isNonFatalError(error) { return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; } } exports["default"] = ErrorFilter; /***/ }), /***/ 2576: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const utils = __nccwpck_require__(8223); class Matcher { constructor(_patterns, _settings, _micromatchOptions) { this._patterns = _patterns; this._settings = _settings; this._micromatchOptions = _micromatchOptions; this._storage = []; this._fillStorage(); } _fillStorage() { for (const pattern of this._patterns) { const segments = this._getPatternSegments(pattern); const sections = this._splitSegmentsIntoSections(segments); this._storage.push({ complete: sections.length <= 1, pattern, segments, sections }); } } _getPatternSegments(pattern) { const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); return parts.map((part) => { const dynamic = utils.pattern.isDynamicPattern(part, this._settings); if (!dynamic) { return { dynamic: false, pattern: part }; } return { dynamic: true, pattern: part, patternRe: utils.pattern.makeRe(part, this._micromatchOptions) }; }); } _splitSegmentsIntoSections(segments) { return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); } } exports["default"] = Matcher; /***/ }), /***/ 5295: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const matcher_1 = __nccwpck_require__(2576); class PartialMatcher extends matcher_1.default { match(filepath) { const parts = filepath.split('/'); const levels = parts.length; const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); for (const pattern of patterns) { const section = pattern.sections[0]; /** * In this case, the pattern has a globstar and we must read all directories unconditionally, * but only if the level has reached the end of the first group. 
* * fixtures/{a,b}/** * ^ true/false ^ always true */ if (!pattern.complete && levels > section.length) { return true; } const match = parts.every((part, index) => { const segment = pattern.segments[index]; if (segment.dynamic && segment.patternRe.test(part)) { return true; } if (!segment.dynamic && segment.pattern === part) { return true; } return false; }); if (match) { return true; } } return false; } } exports["default"] = PartialMatcher; /***/ }), /***/ 257: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const path = __nccwpck_require__(1017); const deep_1 = __nccwpck_require__(6983); const entry_1 = __nccwpck_require__(1343); const error_1 = __nccwpck_require__(6654); const entry_2 = __nccwpck_require__(4029); class Provider { constructor(_settings) { this._settings = _settings; this.errorFilter = new error_1.default(this._settings); this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); this.entryTransformer = new entry_2.default(this._settings); } _getRootDirectory(task) { return path.resolve(this._settings.cwd, task.base); } _getReaderOptions(task) { const basePath = task.base === '.' ? '' : task.base; return { basePath, pathSegmentSeparator: '/', concurrency: this._settings.concurrency, deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), entryFilter: this.entryFilter.getFilter(task.positive, task.negative), errorFilter: this.errorFilter.getFilter(), followSymbolicLinks: this._settings.followSymbolicLinks, fs: this._settings.fs, stats: this._settings.stats, throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, transform: this.entryTransformer.getTransformer() }; } _getMicromatchOptions() { return { dot: this._settings.dot, matchBase: this._settings.baseNameMatch, nobrace: !this._settings.braceExpansion, nocase: !this._settings.caseSensitiveMatch, noext: !this._settings.extglob, noglobstar: !this._settings.globstar, posix: true, strictSlashes: false }; } } exports["default"] = Provider; /***/ }), /***/ 4630: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const stream_1 = __nccwpck_require__(2781); const stream_2 = __nccwpck_require__(2083); const provider_1 = __nccwpck_require__(257); class ProviderStream extends provider_1.default { constructor() { super(...arguments); this._reader = new stream_2.default(this._settings); } read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const source = this.api(root, task, options); const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); source .once('error', (error) => destination.emit('error', error)) .on('data', (entry) => destination.emit('data', options.transform(entry))) .once('end', () => destination.emit('end')); destination .once('close', () => source.destroy()); return destination; } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } exports["default"] = ProviderStream; /***/ }), /***/ 2405: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const sync_1 = __nccwpck_require__(8821); const provider_1 = 
__nccwpck_require__(257); class ProviderSync extends provider_1.default { constructor() { super(...arguments); this._reader = new sync_1.default(this._settings); } read(task) { const root = this._getRootDirectory(task); const options = this._getReaderOptions(task); const entries = this.api(root, task, options); return entries.map(options.transform); } api(root, task, options) { if (task.dynamic) { return this._reader.dynamic(root, options); } return this._reader.static(task.patterns, options); } } exports["default"] = ProviderSync; /***/ }), /***/ 4029: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const utils = __nccwpck_require__(8223); class EntryTransformer { constructor(_settings) { this._settings = _settings; } getTransformer() { return (entry) => this._transform(entry); } _transform(entry) { let filepath = entry.path; if (this._settings.absolute) { filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); filepath = utils.path.unixify(filepath); } if (this._settings.markDirectories && entry.dirent.isDirectory()) { filepath += '/'; } if (!this._settings.objectMode) { return filepath; } return Object.assign(Object.assign({}, entry), { path: filepath }); } } exports["default"] = EntryTransformer; /***/ }), /***/ 7747: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const fsWalk = __nccwpck_require__(6026); const reader_1 = __nccwpck_require__(5582); const stream_1 = __nccwpck_require__(2083); class ReaderAsync extends reader_1.default { constructor() { super(...arguments); this._walkAsync = fsWalk.walk; this._readerStream = new stream_1.default(this._settings); } dynamic(root, options) { return new Promise((resolve, reject) => { this._walkAsync(root, options, (error, entries) => { if (error === null) { resolve(entries); } else { reject(error); } }); }); } async static(patterns, options) { const entries = []; const stream = this._readerStream.static(patterns, options); // After #235, replace it with an asynchronous iterator. 
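// Illustrative sketch (editorial, not from the bundled source): how the two reader branches
// above are exercised through the public API. Patterns without glob magic take the static
// branch and each listed path is stat'ed directly; dynamic patterns walk the task's base
// directory with the bundled fs.walk helper.
//
//   const fg = require('fast-glob');
//   const files = await fg(['package.json', 'README.md']); // static: direct stat of the listed paths
//   const sources = await fg(['src/**'], { dot: true });    // dynamic: directory walk rooted at 'src'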
return new Promise((resolve, reject) => { stream.once('error', reject); stream.on('data', (entry) => entries.push(entry)); stream.once('end', () => resolve(entries)); }); } } exports["default"] = ReaderAsync; /***/ }), /***/ 5582: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const path = __nccwpck_require__(1017); const fsStat = __nccwpck_require__(109); const utils = __nccwpck_require__(8223); class Reader { constructor(_settings) { this._settings = _settings; this._fsStatSettings = new fsStat.Settings({ followSymbolicLink: this._settings.followSymbolicLinks, fs: this._settings.fs, throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks }); } _getFullEntryPath(filepath) { return path.resolve(this._settings.cwd, filepath); } _makeEntry(stats, pattern) { const entry = { name: pattern, path: pattern, dirent: utils.fs.createDirentFromStats(pattern, stats) }; if (this._settings.stats) { entry.stats = stats; } return entry; } _isFatalError(error) { return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; } } exports["default"] = Reader; /***/ }), /***/ 2083: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const stream_1 = __nccwpck_require__(2781); const fsStat = __nccwpck_require__(109); const fsWalk = __nccwpck_require__(6026); const reader_1 = __nccwpck_require__(5582); class ReaderStream extends reader_1.default { constructor() { super(...arguments); this._walkStream = fsWalk.walkStream; this._stat = fsStat.stat; } dynamic(root, options) { return this._walkStream(root, options); } static(patterns, options) { const filepaths = patterns.map(this._getFullEntryPath, this); const stream = new stream_1.PassThrough({ objectMode: true }); stream._write = (index, _enc, done) => { return this._getEntry(filepaths[index], patterns[index], options) .then((entry) => { if (entry !== null && options.entryFilter(entry)) { stream.push(entry); } if (index === filepaths.length - 1) { stream.end(); } done(); }) .catch(done); }; for (let i = 0; i < filepaths.length; i++) { stream.write(i); } return stream; } _getEntry(filepath, pattern, options) { return this._getStat(filepath) .then((stats) => this._makeEntry(stats, pattern)) .catch((error) => { if (options.errorFilter(error)) { return null; } throw error; }); } _getStat(filepath) { return new Promise((resolve, reject) => { this._stat(filepath, this._fsStatSettings, (error, stats) => { return error === null ? 
resolve(stats) : reject(error); }); }); } } exports["default"] = ReaderStream; /***/ }), /***/ 8821: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); const fsStat = __nccwpck_require__(109); const fsWalk = __nccwpck_require__(6026); const reader_1 = __nccwpck_require__(5582); class ReaderSync extends reader_1.default { constructor() { super(...arguments); this._walkSync = fsWalk.walkSync; this._statSync = fsStat.statSync; } dynamic(root, options) { return this._walkSync(root, options); } static(patterns, options) { const entries = []; for (const pattern of patterns) { const filepath = this._getFullEntryPath(pattern); const entry = this._getEntry(filepath, pattern, options); if (entry === null || !options.entryFilter(entry)) { continue; } entries.push(entry); } return entries; } _getEntry(filepath, pattern, options) { try { const stats = this._getStat(filepath); return this._makeEntry(stats, pattern); } catch (error) { if (options.errorFilter(error)) { return null; } throw error; } } _getStat(filepath) { return this._statSync(filepath, this._fsStatSettings); } } exports["default"] = ReaderSync; /***/ }), /***/ 952: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; const fs = __nccwpck_require__(7147); const os = __nccwpck_require__(2037); /** * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 */ const CPU_COUNT = Math.max(os.cpus().length, 1); exports.DEFAULT_FILE_SYSTEM_ADAPTER = { lstat: fs.lstat, lstatSync: fs.lstatSync, stat: fs.stat, statSync: fs.statSync, readdir: fs.readdir, readdirSync: fs.readdirSync }; class Settings { constructor(_options = {}) { this._options = _options; this.absolute = this._getValue(this._options.absolute, false); this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); this.braceExpansion = this._getValue(this._options.braceExpansion, true); this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); this.cwd = this._getValue(this._options.cwd, process.cwd()); this.deep = this._getValue(this._options.deep, Infinity); this.dot = this._getValue(this._options.dot, false); this.extglob = this._getValue(this._options.extglob, true); this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); this.fs = this._getFileSystemMethods(this._options.fs); this.globstar = this._getValue(this._options.globstar, true); this.ignore = this._getValue(this._options.ignore, []); this.markDirectories = this._getValue(this._options.markDirectories, false); this.objectMode = this._getValue(this._options.objectMode, false); this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); this.onlyFiles = this._getValue(this._options.onlyFiles, true); this.stats = this._getValue(this._options.stats, false); this.suppressErrors = this._getValue(this._options.suppressErrors, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); this.unique = this._getValue(this._options.unique, true); if (this.onlyDirectories) { this.onlyFiles = false; } if (this.stats) { this.objectMode = true; } // Remove the cast to the 
array in the next major (#404). this.ignore = [].concat(this.ignore); } _getValue(option, value) { return option === undefined ? value : option; } _getFileSystemMethods(methods = {}) { return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); } } exports["default"] = Settings; /***/ }), /***/ 5325: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.splitWhen = exports.flatten = void 0; function flatten(items) { return items.reduce((collection, item) => [].concat(collection, item), []); } exports.flatten = flatten; function splitWhen(items, predicate) { const result = [[]]; let groupIndex = 0; for (const item of items) { if (predicate(item)) { groupIndex++; result[groupIndex] = []; } else { result[groupIndex].push(item); } } return result; } exports.splitWhen = splitWhen; /***/ }), /***/ 1230: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isEnoentCodeError = void 0; function isEnoentCodeError(error) { return error.code === 'ENOENT'; } exports.isEnoentCodeError = isEnoentCodeError; /***/ }), /***/ 7543: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createDirentFromStats = void 0; class DirentFromStats { constructor(name, stats) { this.name = name; this.isBlockDevice = stats.isBlockDevice.bind(stats); this.isCharacterDevice = stats.isCharacterDevice.bind(stats); this.isDirectory = stats.isDirectory.bind(stats); this.isFIFO = stats.isFIFO.bind(stats); this.isFile = stats.isFile.bind(stats); this.isSocket = stats.isSocket.bind(stats); this.isSymbolicLink = stats.isSymbolicLink.bind(stats); } } function createDirentFromStats(name, stats) { return new DirentFromStats(name, stats); } exports.createDirentFromStats = createDirentFromStats; /***/ }), /***/ 8223: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; const array = __nccwpck_require__(5325); exports.array = array; const errno = __nccwpck_require__(1230); exports.errno = errno; const fs = __nccwpck_require__(7543); exports.fs = fs; const path = __nccwpck_require__(3873); exports.path = path; const pattern = __nccwpck_require__(1221); exports.pattern = pattern; const stream = __nccwpck_require__(8382); exports.stream = stream; const string = __nccwpck_require__(2203); exports.string = string; /***/ }), /***/ 3873: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; const os = __nccwpck_require__(2037); const path = __nccwpck_require__(1017); const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ /** * All non-escaped special characters. * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. * Windows: (){}[], !+@ before (, ! at the beginning. 
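 *
 * Rough example (editorial, derived from the expressions below): escapePosixPath('src/(a|b).js')
 * becomes 'src/\(a\|b\).js', while escapeWindowsPath('src/(a|b).js') yields 'src/\(a|b\).js',
 * leaving '|' untouched because it is not in the Windows character list above.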
*/ const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; /** * The device path (\\.\ or \\?\). * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths */ const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; /** * All backslashes except those escaping special characters. * Windows: !()+@{} * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions */ const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; /** * Designed to work only with simple paths: `dir\\file`. */ function unixify(filepath) { return filepath.replace(/\\/g, '/'); } exports.unixify = unixify; function makeAbsolute(cwd, filepath) { return path.resolve(cwd, filepath); } exports.makeAbsolute = makeAbsolute; function removeLeadingDotSegment(entry) { // We do not use `startsWith` because this is 10x slower than current implementation for some cases. // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with if (entry.charAt(0) === '.') { const secondCharactery = entry.charAt(1); if (secondCharactery === '/' || secondCharactery === '\\') { return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); } } return entry; } exports.removeLeadingDotSegment = removeLeadingDotSegment; exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; function escapeWindowsPath(pattern) { return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } exports.escapeWindowsPath = escapeWindowsPath; function escapePosixPath(pattern) { return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); } exports.escapePosixPath = escapePosixPath; exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern; function convertWindowsPathToPattern(filepath) { return escapeWindowsPath(filepath) .replace(DOS_DEVICE_PATH_RE, '//$1') .replace(WINDOWS_BACKSLASHES_RE, '/'); } exports.convertWindowsPathToPattern = convertWindowsPathToPattern; function convertPosixPathToPattern(filepath) { return escapePosixPath(filepath); } exports.convertPosixPathToPattern = convertPosixPathToPattern; /***/ }), /***/ 1221: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; const path = __nccwpck_require__(1017); const globParent = __nccwpck_require__(4460); const micromatch = __nccwpck_require__(6228); const GLOBSTAR = '**'; const ESCAPE_SYMBOL = '\\'; const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; /** * Matches a sequence of 
two or more consecutive slashes, excluding the first two slashes at the beginning of the string. * The latter is due to the presence of the device path at the beginning of the UNC path. */ const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; function isStaticPattern(pattern, options = {}) { return !isDynamicPattern(pattern, options); } exports.isStaticPattern = isStaticPattern; function isDynamicPattern(pattern, options = {}) { /** * A special case with an empty string is necessary for matching patterns that start with a forward slash. * An empty string cannot be a dynamic pattern. * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. */ if (pattern === '') { return false; } /** * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check * filepath directly (without read directory). */ if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { return true; } if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { return true; } if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { return true; } if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { return true; } return false; } exports.isDynamicPattern = isDynamicPattern; function hasBraceExpansion(pattern) { const openingBraceIndex = pattern.indexOf('{'); if (openingBraceIndex === -1) { return false; } const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); if (closingBraceIndex === -1) { return false; } const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); } function convertToPositivePattern(pattern) { return isNegativePattern(pattern) ? pattern.slice(1) : pattern; } exports.convertToPositivePattern = convertToPositivePattern; function convertToNegativePattern(pattern) { return '!' + pattern; } exports.convertToNegativePattern = convertToNegativePattern; function isNegativePattern(pattern) { return pattern.startsWith('!') && pattern[1] !== '('; } exports.isNegativePattern = isNegativePattern; function isPositivePattern(pattern) { return !isNegativePattern(pattern); } exports.isPositivePattern = isPositivePattern; function getNegativePatterns(patterns) { return patterns.filter(isNegativePattern); } exports.getNegativePatterns = getNegativePatterns; function getPositivePatterns(patterns) { return patterns.filter(isPositivePattern); } exports.getPositivePatterns = getPositivePatterns; /** * Returns patterns that can be applied inside the current directory. * * @example * // ['./*', '*', 'a/*'] * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) */ function getPatternsInsideCurrentDirectory(patterns) { return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); } exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; /** * Returns patterns to be expanded relative to (outside) the current directory. 
* * @example * // ['../*', './../*'] * getPatternsOutsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) */ function getPatternsOutsideCurrentDirectory(patterns) { return patterns.filter(isPatternRelatedToParentDirectory); } exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; function isPatternRelatedToParentDirectory(pattern) { return pattern.startsWith('..') || pattern.startsWith('./..'); } exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; function getBaseDirectory(pattern) { return globParent(pattern, { flipBackslashes: false }); } exports.getBaseDirectory = getBaseDirectory; function hasGlobStar(pattern) { return pattern.includes(GLOBSTAR); } exports.hasGlobStar = hasGlobStar; function endsWithSlashGlobStar(pattern) { return pattern.endsWith('/' + GLOBSTAR); } exports.endsWithSlashGlobStar = endsWithSlashGlobStar; function isAffectDepthOfReadingPattern(pattern) { const basename = path.basename(pattern); return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); } exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; function expandPatternsWithBraceExpansion(patterns) { return patterns.reduce((collection, pattern) => { return collection.concat(expandBraceExpansion(pattern)); }, []); } exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; function expandBraceExpansion(pattern) { const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); /** * Sort the patterns by length so that the same depth patterns are processed side by side. * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` */ patterns.sort((a, b) => a.length - b.length); /** * Micromatch can return an empty string in the case of patterns like `{a,}`. */ return patterns.filter((pattern) => pattern !== ''); } exports.expandBraceExpansion = expandBraceExpansion; function getPatternParts(pattern, options) { let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); /** * The scan method returns an empty array in some cases. * See micromatch/picomatch#58 for more details. */ if (parts.length === 0) { parts = [pattern]; } /** * The scan method does not return an empty part for the pattern with a forward slash. * This is another part of micromatch/picomatch#58. */ if (parts[0].startsWith('/')) { parts[0] = parts[0].slice(1); parts.unshift(''); } return parts; } exports.getPatternParts = getPatternParts; function makeRe(pattern, options) { return micromatch.makeRe(pattern, options); } exports.makeRe = makeRe; function convertPatternsToRe(patterns, options) { return patterns.map((pattern) => makeRe(pattern, options)); } exports.convertPatternsToRe = convertPatternsToRe; function matchAny(entry, patternsRe) { return patternsRe.some((patternRe) => patternRe.test(entry)); } exports.matchAny = matchAny; /** * This package only works with forward slashes as a path separator. * Because of this, we cannot use the standard `path.normalize` method, because on the Windows platform it would use backslashes. 
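 *
 * Worked example (editorial, based on DOUBLE_SLASH_RE above): removeDuplicateSlashes('a//b///c')
 * yields 'a/b/c', while a leading '//' (as in a converted UNC or device path such as
 * '//?/C:/dir//file') is preserved because the expression skips the start of the string.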
*/ function removeDuplicateSlashes(pattern) { return pattern.replace(DOUBLE_SLASH_RE, '/'); } exports.removeDuplicateSlashes = removeDuplicateSlashes; /***/ }), /***/ 8382: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.merge = void 0; const merge2 = __nccwpck_require__(2578); function merge(streams) { const mergedStream = merge2(streams); streams.forEach((stream) => { stream.once('error', (error) => mergedStream.emit('error', error)); }); mergedStream.once('close', () => propagateCloseEventToSources(streams)); mergedStream.once('end', () => propagateCloseEventToSources(streams)); return mergedStream; } exports.merge = merge; function propagateCloseEventToSources(streams) { streams.forEach((stream) => stream.emit('close')); } /***/ }), /***/ 2203: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isEmpty = exports.isString = void 0; function isString(input) { return typeof input === 'string'; } exports.isString = isString; function isEmpty(input) { return input === ''; } exports.isEmpty = isEmpty; /***/ }), /***/ 7340: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* eslint-disable no-var */ var reusify = __nccwpck_require__(2113) function fastqueue (context, worker, concurrency) { if (typeof context === 'function') { concurrency = worker worker = context context = null } if (concurrency < 1) { throw new Error('fastqueue concurrency must be greater than 1') } var cache = reusify(Task) var queueHead = null var queueTail = null var _running = 0 var errorHandler = null var self = { push: push, drain: noop, saturated: noop, pause: pause, paused: false, concurrency: concurrency, running: running, resume: resume, idle: idle, length: length, getQueue: getQueue, unshift: unshift, empty: noop, kill: kill, killAndDrain: killAndDrain, error: error } return self function running () { return _running } function pause () { self.paused = true } function length () { var current = queueHead var counter = 0 while (current) { current = current.next counter++ } return counter } function getQueue () { var current = queueHead var tasks = [] while (current) { tasks.push(current.value) current = current.next } return tasks } function resume () { if (!self.paused) return self.paused = false for (var i = 0; i < self.concurrency; i++) { _running++ release() } } function idle () { return _running === 0 && self.length() === 0 } function push (value, done) { var current = cache.get() current.context = context current.release = release current.value = value current.callback = done || noop current.errorHandler = errorHandler if (_running === self.concurrency || self.paused) { if (queueTail) { queueTail.next = current queueTail = current } else { queueHead = current queueTail = current self.saturated() } } else { _running++ worker.call(context, current.value, current.worked) } } function unshift (value, done) { var current = cache.get() current.context = context current.release = release current.value = value current.callback = done || noop current.errorHandler = errorHandler if (_running === self.concurrency || self.paused) { if (queueHead) { current.next = queueHead queueHead = current } else { queueHead = current queueTail = current self.saturated() } } else { _running++ worker.call(context, current.value, current.worked) } } function release (holder) { if (holder) { cache.release(holder) } var 
next = queueHead if (next) { if (!self.paused) { if (queueTail === queueHead) { queueTail = null } queueHead = next.next next.next = null worker.call(context, next.value, next.worked) if (queueTail === null) { self.empty() } } else { _running-- } } else if (--_running === 0) { self.drain() } } function kill () { queueHead = null queueTail = null self.drain = noop } function killAndDrain () { queueHead = null queueTail = null self.drain() self.drain = noop } function error (handler) { errorHandler = handler } } function noop () {} function Task () { this.value = null this.callback = noop this.next = null this.release = noop this.context = null this.errorHandler = null var self = this this.worked = function worked (err, result) { var callback = self.callback var errorHandler = self.errorHandler var val = self.value self.value = null self.callback = noop if (self.errorHandler) { errorHandler(err, val) } callback.call(self.context, err, result) self.release(self) } } function queueAsPromised (context, worker, concurrency) { if (typeof context === 'function') { concurrency = worker worker = context context = null } function asyncWrapper (arg, cb) { worker.call(this, arg) .then(function (res) { cb(null, res) }, cb) } var queue = fastqueue(context, asyncWrapper, concurrency) var pushCb = queue.push var unshiftCb = queue.unshift queue.push = push queue.unshift = unshift queue.drained = drained return queue function push (value) { var p = new Promise(function (resolve, reject) { pushCb(value, function (err, result) { if (err) { reject(err) return } resolve(result) }) }) // Let's fork the promise chain to // make the error bubble up to the user but // not lead to a unhandledRejection p.catch(noop) return p } function unshift (value) { var p = new Promise(function (resolve, reject) { unshiftCb(value, function (err, result) { if (err) { reject(err) return } resolve(result) }) }) // Let's fork the promise chain to // make the error bubble up to the user but // not lead to a unhandledRejection p.catch(noop) return p } function drained () { if (queue.idle()) { return new Promise(function (resolve) { resolve() }) } var previousDrain = queue.drain var p = new Promise(function (resolve) { queue.drain = function () { previousDrain() resolve() } }) return p } } module.exports = fastqueue module.exports.promise = queueAsPromised /***/ }), /***/ 6330: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*! * fill-range * * Copyright (c) 2014-present, Jon Schlinkert. * Licensed under the MIT License. */ const util = __nccwpck_require__(3837); const toRegexRange = __nccwpck_require__(1861); const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); const transform = toNumber => { return value => toNumber === true ? Number(value) : String(value); }; const isValidValue = value => { return typeof value === 'number' || (typeof value === 'string' && value !== ''); }; const isNumber = num => Number.isInteger(+num); const zeros = input => { let value = `${input}`; let index = -1; if (value[0] === '-') value = value.slice(1); if (value === '0') return false; while (value[++index] === '0'); return index > 0; }; const stringify = (start, end, options) => { if (typeof start === 'string' || typeof end === 'string') { return true; } return options.stringify === true; }; const pad = (input, maxLength, toNumber) => { if (maxLength > 0) { let dash = input[0] === '-' ? '-' : ''; if (dash) input = input.slice(1); input = (dash + input.padStart(dash ? 
maxLength - 1 : maxLength, '0')); } if (toNumber === false) { return String(input); } return input; }; const toMaxLen = (input, maxLength) => { let negative = input[0] === '-' ? '-' : ''; if (negative) { input = input.slice(1); maxLength--; } while (input.length < maxLength) input = '0' + input; return negative ? ('-' + input) : input; }; const toSequence = (parts, options) => { parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); let prefix = options.capture ? '' : '?:'; let positives = ''; let negatives = ''; let result; if (parts.positives.length) { positives = parts.positives.join('|'); } if (parts.negatives.length) { negatives = `-(${prefix}${parts.negatives.join('|')})`; } if (positives && negatives) { result = `${positives}|${negatives}`; } else { result = positives || negatives; } if (options.wrap) { return `(${prefix}${result})`; } return result; }; const toRange = (a, b, isNumbers, options) => { if (isNumbers) { return toRegexRange(a, b, { wrap: false, ...options }); } let start = String.fromCharCode(a); if (a === b) return start; let stop = String.fromCharCode(b); return `[${start}-${stop}]`; }; const toRegex = (start, end, options) => { if (Array.isArray(start)) { let wrap = options.wrap === true; let prefix = options.capture ? '' : '?:'; return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); } return toRegexRange(start, end, options); }; const rangeError = (...args) => { return new RangeError('Invalid range arguments: ' + util.inspect(...args)); }; const invalidRange = (start, end, options) => { if (options.strictRanges === true) throw rangeError([start, end]); return []; }; const invalidStep = (step, options) => { if (options.strictRanges === true) { throw new TypeError(`Expected step "${step}" to be a number`); } return []; }; const fillNumbers = (start, end, step = 1, options = {}) => { let a = Number(start); let b = Number(end); if (!Number.isInteger(a) || !Number.isInteger(b)) { if (options.strictRanges === true) throw rangeError([start, end]); return []; } // fix negative zero if (a === 0) a = 0; if (b === 0) b = 0; let descending = a > b; let startString = String(start); let endString = String(end); let stepString = String(step); step = Math.max(Math.abs(step), 1); let padded = zeros(startString) || zeros(endString) || zeros(stepString); let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; let toNumber = padded === false && stringify(start, end, options) === false; let format = options.transform || transform(toNumber); if (options.toRegex && step === 1) { return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); } let parts = { negatives: [], positives: [] }; let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); let range = []; let index = 0; while (descending ? a >= b : a <= b) { if (options.toRegex === true && step > 1) { push(a); } else { range.push(pad(format(a, index), maxLen, toNumber)); } a = descending ? a - step : a + step; index++; } if (options.toRegex === true) { return step > 1 ? 
toSequence(parts, options) : toRegex(range, null, { wrap: false, ...options }); } return range; }; const fillLetters = (start, end, step = 1, options = {}) => { if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { return invalidRange(start, end, options); } let format = options.transform || (val => String.fromCharCode(val)); let a = `${start}`.charCodeAt(0); let b = `${end}`.charCodeAt(0); let descending = a > b; let min = Math.min(a, b); let max = Math.max(a, b); if (options.toRegex && step === 1) { return toRange(min, max, false, options); } let range = []; let index = 0; while (descending ? a >= b : a <= b) { range.push(format(a, index)); a = descending ? a - step : a + step; index++; } if (options.toRegex === true) { return toRegex(range, null, { wrap: false, options }); } return range; }; const fill = (start, end, step, options = {}) => { if (end == null && isValidValue(start)) { return [start]; } if (!isValidValue(start) || !isValidValue(end)) { return invalidRange(start, end, options); } if (typeof step === 'function') { return fill(start, end, 1, { transform: step }); } if (isObject(step)) { return fill(start, end, 0, step); } let opts = { ...options }; if (opts.capture === true) opts.wrap = true; step = step || opts.step || 1; if (!isNumber(step)) { if (step != null && !isObject(step)) return invalidStep(step, opts); return fill(start, end, 1, step); } if (isNumber(start) && isNumber(end)) { return fillNumbers(start, end, step, opts); } return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); }; module.exports = fill; /***/ }), /***/ 1133: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var debug; module.exports = function () { if (!debug) { try { /* eslint global-require: off */ debug = __nccwpck_require__(8237)("follow-redirects"); } catch (error) { /* */ } if (typeof debug !== "function") { debug = function () { /* */ }; } } debug.apply(null, arguments); }; /***/ }), /***/ 7707: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var url = __nccwpck_require__(7310); var URL = url.URL; var http = __nccwpck_require__(3685); var https = __nccwpck_require__(5687); var Writable = (__nccwpck_require__(2781).Writable); var assert = __nccwpck_require__(9491); var debug = __nccwpck_require__(1133); // Whether to use the native URL object or the legacy url module var useNativeURL = false; try { assert(new URL()); } catch (error) { useNativeURL = error.code === "ERR_INVALID_URL"; } // URL fields to preserve in copy operations var preservedUrlFields = [ "auth", "host", "hostname", "href", "path", "pathname", "port", "protocol", "query", "search", "hash", ]; // Create handlers that pass events from native requests var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; var eventHandlers = Object.create(null); events.forEach(function (event) { eventHandlers[event] = function (arg1, arg2, arg3) { this._redirectable.emit(event, arg1, arg2, arg3); }; }); // Error types with codes var InvalidUrlError = createErrorType( "ERR_INVALID_URL", "Invalid URL", TypeError ); var RedirectionError = createErrorType( "ERR_FR_REDIRECTION_FAILURE", "Redirected request failed" ); var TooManyRedirectsError = createErrorType( "ERR_FR_TOO_MANY_REDIRECTS", "Maximum number of redirects exceeded", RedirectionError ); var MaxBodyLengthExceededError = createErrorType( "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", "Request body larger than maxBodyLength limit" ); var WriteAfterEndError = createErrorType( "ERR_STREAM_WRITE_AFTER_END", 
"write after end" ); // istanbul ignore next var destroy = Writable.prototype.destroy || noop; // An HTTP(S) request that can be redirected function RedirectableRequest(options, responseCallback) { // Initialize the request Writable.call(this); this._sanitizeOptions(options); this._options = options; this._ended = false; this._ending = false; this._redirectCount = 0; this._redirects = []; this._requestBodyLength = 0; this._requestBodyBuffers = []; // Attach a callback if passed if (responseCallback) { this.on("response", responseCallback); } // React to responses of native requests var self = this; this._onNativeResponse = function (response) { try { self._processResponse(response); } catch (cause) { self.emit("error", cause instanceof RedirectionError ? cause : new RedirectionError({ cause: cause })); } }; // Perform the first request this._performRequest(); } RedirectableRequest.prototype = Object.create(Writable.prototype); RedirectableRequest.prototype.abort = function () { destroyRequest(this._currentRequest); this._currentRequest.abort(); this.emit("abort"); }; RedirectableRequest.prototype.destroy = function (error) { destroyRequest(this._currentRequest, error); destroy.call(this, error); return this; }; // Writes buffered data to the current native request RedirectableRequest.prototype.write = function (data, encoding, callback) { // Writing is not allowed if end has been called if (this._ending) { throw new WriteAfterEndError(); } // Validate input and shift parameters if necessary if (!isString(data) && !isBuffer(data)) { throw new TypeError("data should be a string, Buffer or Uint8Array"); } if (isFunction(encoding)) { callback = encoding; encoding = null; } // Ignore empty buffers, since writing them doesn't invoke the callback // https://github.com/nodejs/node/issues/22066 if (data.length === 0) { if (callback) { callback(); } return; } // Only write when we don't exceed the maximum body length if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { this._requestBodyLength += data.length; this._requestBodyBuffers.push({ data: data, encoding: encoding }); this._currentRequest.write(data, encoding, callback); } // Error when we exceed the maximum body length else { this.emit("error", new MaxBodyLengthExceededError()); this.abort(); } }; // Ends the current native request RedirectableRequest.prototype.end = function (data, encoding, callback) { // Shift parameters if necessary if (isFunction(data)) { callback = data; data = encoding = null; } else if (isFunction(encoding)) { callback = encoding; encoding = null; } // Write data if needed and end if (!data) { this._ended = this._ending = true; this._currentRequest.end(null, null, callback); } else { var self = this; var currentRequest = this._currentRequest; this.write(data, encoding, function () { self._ended = true; currentRequest.end(null, null, callback); }); this._ending = true; } }; // Sets a header value on the current native request RedirectableRequest.prototype.setHeader = function (name, value) { this._options.headers[name] = value; this._currentRequest.setHeader(name, value); }; // Clears a header value on the current native request RedirectableRequest.prototype.removeHeader = function (name) { delete this._options.headers[name]; this._currentRequest.removeHeader(name); }; // Global timeout for all underlying requests RedirectableRequest.prototype.setTimeout = function (msecs, callback) { var self = this; // Destroys the socket on timeout function destroyOnTimeout(socket) { socket.setTimeout(msecs); 
socket.removeListener("timeout", socket.destroy); socket.addListener("timeout", socket.destroy); } // Sets up a timer to trigger a timeout event function startTimer(socket) { if (self._timeout) { clearTimeout(self._timeout); } self._timeout = setTimeout(function () { self.emit("timeout"); clearTimer(); }, msecs); destroyOnTimeout(socket); } // Stops a timeout from triggering function clearTimer() { // Clear the timeout if (self._timeout) { clearTimeout(self._timeout); self._timeout = null; } // Clean up all attached listeners self.removeListener("abort", clearTimer); self.removeListener("error", clearTimer); self.removeListener("response", clearTimer); self.removeListener("close", clearTimer); if (callback) { self.removeListener("timeout", callback); } if (!self.socket) { self._currentRequest.removeListener("socket", startTimer); } } // Attach callback if passed if (callback) { this.on("timeout", callback); } // Start the timer if or when the socket is opened if (this.socket) { startTimer(this.socket); } else { this._currentRequest.once("socket", startTimer); } // Clean up on events this.on("socket", destroyOnTimeout); this.on("abort", clearTimer); this.on("error", clearTimer); this.on("response", clearTimer); this.on("close", clearTimer); return this; }; // Proxy all other public ClientRequest methods [ "flushHeaders", "getHeader", "setNoDelay", "setSocketKeepAlive", ].forEach(function (method) { RedirectableRequest.prototype[method] = function (a, b) { return this._currentRequest[method](a, b); }; }); // Proxy all public ClientRequest properties ["aborted", "connection", "socket"].forEach(function (property) { Object.defineProperty(RedirectableRequest.prototype, property, { get: function () { return this._currentRequest[property]; }, }); }); RedirectableRequest.prototype._sanitizeOptions = function (options) { // Ensure headers are always present if (!options.headers) { options.headers = {}; } // Since http.request treats host as an alias of hostname, // but the url module interprets host as hostname plus port, // eliminate the host property to avoid confusion. if (options.host) { // Use hostname if set, because it has precedence if (!options.hostname) { options.hostname = options.host; } delete options.host; } // Complete the URL object when necessary if (!options.pathname && options.path) { var searchPos = options.path.indexOf("?"); if (searchPos < 0) { options.pathname = options.path; } else { options.pathname = options.path.substring(0, searchPos); options.search = options.path.substring(searchPos); } } }; // Executes the next native request (initial or redirect) RedirectableRequest.prototype._performRequest = function () { // Load the native protocol var protocol = this._options.protocol; var nativeProtocol = this._options.nativeProtocols[protocol]; if (!nativeProtocol) { throw new TypeError("Unsupported protocol " + protocol); } // If specified, use the agent corresponding to the protocol // (HTTP and HTTPS use different types of agents) if (this._options.agents) { var scheme = protocol.slice(0, -1); this._options.agent = this._options.agents[scheme]; } // Create the native request and set up its event handlers var request = this._currentRequest = nativeProtocol.request(this._options, this._onNativeResponse); request._redirectable = this; for (var event of events) { request.on(event, eventHandlers[event]); } // RFC7230§5.3.1: When making a request directly to an origin server, […] // a client MUST send only the absolute path […] as the request-target. 
this._currentUrl = /^\//.test(this._options.path) ? url.format(this._options) : // When making a request to a proxy, […] // a client MUST send the target URI in absolute-form […]. this._options.path; // End a redirected request // (The first request must be ended explicitly with RedirectableRequest#end) if (this._isRedirect) { // Write the request entity and end var i = 0; var self = this; var buffers = this._requestBodyBuffers; (function writeNext(error) { // Only write if this request has not been redirected yet /* istanbul ignore else */ if (request === self._currentRequest) { // Report any write errors /* istanbul ignore if */ if (error) { self.emit("error", error); } // Write the next buffer if there are still left else if (i < buffers.length) { var buffer = buffers[i++]; /* istanbul ignore else */ if (!request.finished) { request.write(buffer.data, buffer.encoding, writeNext); } } // End the request if `end` has been called on us else if (self._ended) { request.end(); } } }()); } }; // Processes a response from the current native request RedirectableRequest.prototype._processResponse = function (response) { // Store the redirected response var statusCode = response.statusCode; if (this._options.trackRedirects) { this._redirects.push({ url: this._currentUrl, headers: response.headers, statusCode: statusCode, }); } // RFC7231§6.4: The 3xx (Redirection) class of status code indicates // that further action needs to be taken by the user agent in order to // fulfill the request. If a Location header field is provided, // the user agent MAY automatically redirect its request to the URI // referenced by the Location field value, // even if the specific status code is not understood. // If the response is not a redirect; return it as-is var location = response.headers.location; if (!location || this._options.followRedirects === false || statusCode < 300 || statusCode >= 400) { response.responseUrl = this._currentUrl; response.redirects = this._redirects; this.emit("response", response); // Clean up this._requestBodyBuffers = []; return; } // The response is a redirect, so abort the current request destroyRequest(this._currentRequest); // Discard the remainder of the response to avoid waiting for data response.destroy(); // RFC7231§6.4: A client SHOULD detect and intervene // in cyclical redirections (i.e., "infinite" redirection loops). if (++this._redirectCount > this._options.maxRedirects) { throw new TooManyRedirectsError(); } // Store the request headers if applicable var requestHeaders; var beforeRedirect = this._options.beforeRedirect; if (beforeRedirect) { requestHeaders = Object.assign({ // The Host header was set by nativeProtocol.request Host: response.req.getHeader("host"), }, this._options.headers); } // RFC7231§6.4: Automatic redirection needs to done with // care for methods not known to be safe, […] // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change // the request method from POST to GET for the subsequent request. 
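// Illustrative note (hedged, comments only): the block below implements the RFC 7231 method
// rewrite. For example, a POST that receives a 301/302/303 is replayed as a GET against the
// Location target, with the buffered request body and content-* headers dropped:
//
//   const { http } = require('follow-redirects');
//   const req = http.request({ host: 'example.com', method: 'POST', path: '/submit' }, res => {
//     // if /submit answered "302 Location: /result", this response came from "GET /result"
//     // and the original POST body was discarded
//   });
//   req.end('payload');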
var method = this._options.method; if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || // RFC7231§6.4.4: The 303 (See Other) status code indicates that // the server is redirecting the user agent to a different resource […] // A user agent can perform a retrieval request targeting that URI // (a GET or HEAD request if using HTTP) […] (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { this._options.method = "GET"; // Drop a possible entity and headers related to it this._requestBodyBuffers = []; removeMatchingHeaders(/^content-/i, this._options.headers); } // Drop the Host header, as the redirect might lead to a different host var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); // If the redirect is relative, carry over the host of the last request var currentUrlParts = parseUrl(this._currentUrl); var currentHost = currentHostHeader || currentUrlParts.host; var currentUrl = /^\w+:/.test(location) ? this._currentUrl : url.format(Object.assign(currentUrlParts, { host: currentHost })); // Create the redirected request var redirectUrl = resolveUrl(location, currentUrl); debug("redirecting to", redirectUrl.href); this._isRedirect = true; spreadUrlObject(redirectUrl, this._options); // Drop confidential headers when redirecting to a less secure protocol // or to a different domain that is not a superdomain if (redirectUrl.protocol !== currentUrlParts.protocol && redirectUrl.protocol !== "https:" || redirectUrl.host !== currentHost && !isSubdomain(redirectUrl.host, currentHost)) { removeMatchingHeaders(/^(?:authorization|cookie)$/i, this._options.headers); } // Evaluate the beforeRedirect callback if (isFunction(beforeRedirect)) { var responseDetails = { headers: response.headers, statusCode: statusCode, }; var requestDetails = { url: currentUrl, method: method, headers: requestHeaders, }; beforeRedirect(this._options, responseDetails, requestDetails); this._sanitizeOptions(this._options); } // Perform the redirected request this._performRequest(); }; // Wraps the key/value object of protocols with redirect functionality function wrap(protocols) { // Default settings var exports = { maxRedirects: 21, maxBodyLength: 10 * 1024 * 1024, }; // Wrap each protocol var nativeProtocols = {}; Object.keys(protocols).forEach(function (scheme) { var protocol = scheme + ":"; var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); // Executes a request, following redirects function request(input, options, callback) { // Parse parameters, ensuring that input is an object if (isURL(input)) { input = spreadUrlObject(input); } else if (isString(input)) { input = spreadUrlObject(parseUrl(input)); } else { callback = options; options = validateUrl(input); input = { protocol: protocol }; } if (isFunction(options)) { callback = options; options = null; } // Set defaults options = Object.assign({ maxRedirects: exports.maxRedirects, maxBodyLength: exports.maxBodyLength, }, input, options); options.nativeProtocols = nativeProtocols; if (!isString(options.host) && !isString(options.hostname)) { options.hostname = "::1"; } assert.equal(options.protocol, protocol, "protocol mismatch"); debug("options", options); return new RedirectableRequest(options, callback); } // Executes a GET request, following redirects function get(input, options, callback) { var wrappedRequest = wrappedProtocol.request(input, options, callback); wrappedRequest.end(); return 
wrappedRequest; } // Expose the properties on the wrapped protocol Object.defineProperties(wrappedProtocol, { request: { value: request, configurable: true, enumerable: true, writable: true }, get: { value: get, configurable: true, enumerable: true, writable: true }, }); }); return exports; } function noop() { /* empty */ } function parseUrl(input) { var parsed; /* istanbul ignore else */ if (useNativeURL) { parsed = new URL(input); } else { // Ensure the URL is valid and absolute parsed = validateUrl(url.parse(input)); if (!isString(parsed.protocol)) { throw new InvalidUrlError({ input }); } } return parsed; } function resolveUrl(relative, base) { /* istanbul ignore next */ return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { throw new InvalidUrlError({ input: input.href || input }); } if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { throw new InvalidUrlError({ input: input.href || input }); } return input; } function spreadUrlObject(urlObject, target) { var spread = target || {}; for (var key of preservedUrlFields) { spread[key] = urlObject[key]; } // Fix IPv6 hostname if (spread.hostname.startsWith("[")) { spread.hostname = spread.hostname.slice(1, -1); } // Ensure port is a number if (spread.port !== "") { spread.port = Number(spread.port); } // Concatenate path spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; return spread; } function removeMatchingHeaders(regex, headers) { var lastValue; for (var header in headers) { if (regex.test(header)) { lastValue = headers[header]; delete headers[header]; } } return (lastValue === null || typeof lastValue === "undefined") ? undefined : String(lastValue).trim(); } function createErrorType(code, message, baseClass) { // Create constructor function CustomError(properties) { Error.captureStackTrace(this, this.constructor); Object.assign(this, properties || {}); this.code = code; this.message = this.cause ? message + ": " + this.cause.message : message; } // Attach constructor and set default properties CustomError.prototype = new (baseClass || Error)(); Object.defineProperties(CustomError.prototype, { constructor: { value: CustomError, enumerable: false, }, name: { value: "Error [" + code + "]", enumerable: false, }, }); return CustomError; } function destroyRequest(request, error) { for (var event of events) { request.removeListener(event, eventHandlers[event]); } request.on("error", noop); request.destroy(error); } function isSubdomain(subdomain, domain) { assert(isString(subdomain) && isString(domain)); var dot = subdomain.length - domain.length - 1; return dot > 0 && subdomain[dot] === "." 
&& subdomain.endsWith(domain); } function isString(value) { return typeof value === "string" || value instanceof String; } function isFunction(value) { return typeof value === "function"; } function isBuffer(value) { return typeof value === "object" && ("length" in value); } function isURL(value) { return URL && value instanceof URL; } // Exports module.exports = wrap({ http: http, https: https }); module.exports.wrap = wrap; /***/ }), /***/ 4334: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var CombinedStream = __nccwpck_require__(5443); var util = __nccwpck_require__(3837); var path = __nccwpck_require__(1017); var http = __nccwpck_require__(3685); var https = __nccwpck_require__(5687); var parseUrl = (__nccwpck_require__(7310).parse); var fs = __nccwpck_require__(7147); var Stream = (__nccwpck_require__(2781).Stream); var mime = __nccwpck_require__(3583); var asynckit = __nccwpck_require__(4812); var populate = __nccwpck_require__(7142); // Public API module.exports = FormData; // make it a Stream util.inherits(FormData, CombinedStream); /** * Create readable "multipart/form-data" streams. * Can be used to submit forms * and file uploads to other web applications. * * @constructor * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream */ function FormData(options) { if (!(this instanceof FormData)) { return new FormData(options); } this._overheadLength = 0; this._valueLength = 0; this._valuesToMeasure = []; CombinedStream.call(this); options = options || {}; for (var option in options) { this[option] = options[option]; } } FormData.LINE_BREAK = '\r\n'; FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; FormData.prototype.append = function(field, value, options) { options = options || {}; // allow filename as single option if (typeof options == 'string') { options = {filename: options}; } var append = CombinedStream.prototype.append.bind(this); // all that streamy business can't handle numbers if (typeof value == 'number') { value = '' + value; } // https://github.com/felixge/node-form-data/issues/38 if (util.isArray(value)) { // Please convert your array into string // the way web server expects it this._error(new Error('Arrays are not supported.')); return; } var header = this._multiPartHeader(field, value, options); var footer = this._multiPartFooter(); append(header); append(value); append(footer); // pass along options.knownLength this._trackLength(header, value, options); }; FormData.prototype._trackLength = function(header, value, options) { var valueLength = 0; // used w/ getLengthSync(), when length is known. // e.g. for streaming directly from a remote server, // w/ a known file a size, and not wanting to wait for // incoming file to finish to get its size. if (options.knownLength != null) { valueLength += +options.knownLength; } else if (Buffer.isBuffer(value)) { valueLength = value.length; } else if (typeof value === 'string') { valueLength = Buffer.byteLength(value); } this._valueLength += valueLength; // @check why add CRLF? does this account for custom/multiple CRLFs? 
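// Illustrative sketch (assumption: typical form-data usage, not executed by this bundle):
// when appending a stream whose size is known up front, passing knownLength lets
// _trackLength() account for it so getLengthSync()/getLength() can report a total.
//
//   const FormData = require('form-data');
//   const fs = require('fs');
//   const form = new FormData();
//   form.append('field', 'plain value');
//   form.append('file', fs.createReadStream('/tmp/report.xml'), {
//     filename: 'report.xml',
//     knownLength: fs.statSync('/tmp/report.xml').size
//   });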
this._overheadLength += Buffer.byteLength(header) + FormData.LINE_BREAK.length; // empty or either doesn't have path or not an http response or not a stream if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { return; } // no need to bother with the length if (!options.knownLength) { this._valuesToMeasure.push(value); } }; FormData.prototype._lengthRetriever = function(value, callback) { if (value.hasOwnProperty('fd')) { // take read range into a account // `end` = Infinity –> read file till the end // // TODO: Looks like there is bug in Node fs.createReadStream // it doesn't respect `end` options without `start` options // Fix it when node fixes it. // https://github.com/joyent/node/issues/7819 if (value.end != undefined && value.end != Infinity && value.start != undefined) { // when end specified // no need to calculate range // inclusive, starts with 0 callback(null, value.end + 1 - (value.start ? value.start : 0)); // not that fast snoopy } else { // still need to fetch file size from fs fs.stat(value.path, function(err, stat) { var fileSize; if (err) { callback(err); return; } // update final size based on the range options fileSize = stat.size - (value.start ? value.start : 0); callback(null, fileSize); }); } // or http response } else if (value.hasOwnProperty('httpVersion')) { callback(null, +value.headers['content-length']); // or request stream http://github.com/mikeal/request } else if (value.hasOwnProperty('httpModule')) { // wait till response come back value.on('response', function(response) { value.pause(); callback(null, +response.headers['content-length']); }); value.resume(); // something else } else { callback('Unknown stream'); } }; FormData.prototype._multiPartHeader = function(field, value, options) { // custom header specified (as string)? // it becomes responsible for boundary // (e.g. to handle extra CRLFs on .NET servers) if (typeof options.header == 'string') { return options.header; } var contentDisposition = this._getContentDisposition(value, options); var contentType = this._getContentType(value, options); var contents = ''; var headers = { // add custom disposition as third element or keep it two elements if not 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), // if no content type. allow it to be empty array 'Content-Type': [].concat(contentType || []) }; // allow custom headers. if (typeof options.header == 'object') { populate(headers, options.header); } var header; for (var prop in headers) { if (!headers.hasOwnProperty(prop)) continue; header = headers[prop]; // skip nullish headers. if (header == null) { continue; } // convert all headers to arrays. if (!Array.isArray(header)) { header = [header]; } // add non-empty headers. 
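// Illustrative sketch (not part of the bundle): the options.header handling above accepts either
// a complete header string (the caller then owns the boundary) or an object whose entries are
// merged into the generated Content-Disposition / Content-Type values, e.g.:
//
//   form.append('file', buffer, {
//     filename: 'data.bin',
//     header: { 'X-Custom-Meta': 'example' } // hypothetical extra header for this part
//   });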
if (header.length) { contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; } } return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; }; FormData.prototype._getContentDisposition = function(value, options) { var filename , contentDisposition ; if (typeof options.filepath === 'string') { // custom filepath for relative paths filename = path.normalize(options.filepath).replace(/\\/g, '/'); } else if (options.filename || value.name || value.path) { // custom filename take precedence // formidable and the browser add a name property // fs- and request- streams have path property filename = path.basename(options.filename || value.name || value.path); } else if (value.readable && value.hasOwnProperty('httpVersion')) { // or try http response filename = path.basename(value.client._httpMessage.path || ''); } if (filename) { contentDisposition = 'filename="' + filename + '"'; } return contentDisposition; }; FormData.prototype._getContentType = function(value, options) { // use custom content-type above all var contentType = options.contentType; // or try `name` from formidable, browser if (!contentType && value.name) { contentType = mime.lookup(value.name); } // or try `path` from fs-, request- streams if (!contentType && value.path) { contentType = mime.lookup(value.path); } // or if it's http-reponse if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { contentType = value.headers['content-type']; } // or guess it from the filepath or filename if (!contentType && (options.filepath || options.filename)) { contentType = mime.lookup(options.filepath || options.filename); } // fallback to the default content type if `value` is not simple value if (!contentType && typeof value == 'object') { contentType = FormData.DEFAULT_CONTENT_TYPE; } return contentType; }; FormData.prototype._multiPartFooter = function() { return function(next) { var footer = FormData.LINE_BREAK; var lastPart = (this._streams.length === 0); if (lastPart) { footer += this._lastBoundary(); } next(footer); }.bind(this); }; FormData.prototype._lastBoundary = function() { return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; }; FormData.prototype.getHeaders = function(userHeaders) { var header; var formHeaders = { 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() }; for (header in userHeaders) { if (userHeaders.hasOwnProperty(header)) { formHeaders[header.toLowerCase()] = userHeaders[header]; } } return formHeaders; }; FormData.prototype.setBoundary = function(boundary) { this._boundary = boundary; }; FormData.prototype.getBoundary = function() { if (!this._boundary) { this._generateBoundary(); } return this._boundary; }; FormData.prototype.getBuffer = function() { var dataBuffer = new Buffer.alloc( 0 ); var boundary = this.getBoundary(); // Create the form content. Add Line breaks to the end of data. for (var i = 0, len = this._streams.length; i < len; i++) { if (typeof this._streams[i] !== 'function') { // Add content to the buffer. if(Buffer.isBuffer(this._streams[i])) { dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); }else { dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); } // Add break after content. if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); } } } // Add the footer and return the Buffer object. 
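// Illustrative sketch (assumption: typical consumer code): getBoundary()/getHeaders()/getBuffer()
// are commonly combined to hand a fully materialized multipart body to another HTTP client:
//
//   const form = new FormData();
//   form.append('name', 'value');
//   const body = form.getBuffer();                                 // Buffer with all parts + final boundary
//   const headers = form.getHeaders({ accept: 'application/json' });
//   // headers['content-type'] === 'multipart/form-data; boundary=' + form.getBoundary()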
return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); }; FormData.prototype._generateBoundary = function() { // This generates a 50 character boundary similar to those used by Firefox. // They are optimized for boyer-moore parsing. var boundary = '--------------------------'; for (var i = 0; i < 24; i++) { boundary += Math.floor(Math.random() * 10).toString(16); } this._boundary = boundary; }; // Note: getLengthSync DOESN'T calculate streams length // As workaround one can calculate file size manually // and add it as knownLength option FormData.prototype.getLengthSync = function() { var knownLength = this._overheadLength + this._valueLength; // Don't get confused, there are 3 "internal" streams for each keyval pair // so it basically checks if there is any value added to the form if (this._streams.length) { knownLength += this._lastBoundary().length; } // https://github.com/form-data/form-data/issues/40 if (!this.hasKnownLength()) { // Some async length retrievers are present // therefore synchronous length calculation is false. // Please use getLength(callback) to get proper length this._error(new Error('Cannot calculate proper length in synchronous way.')); } return knownLength; }; // Public API to check if length of added values is known // https://github.com/form-data/form-data/issues/196 // https://github.com/form-data/form-data/issues/262 FormData.prototype.hasKnownLength = function() { var hasKnownLength = true; if (this._valuesToMeasure.length) { hasKnownLength = false; } return hasKnownLength; }; FormData.prototype.getLength = function(cb) { var knownLength = this._overheadLength + this._valueLength; if (this._streams.length) { knownLength += this._lastBoundary().length; } if (!this._valuesToMeasure.length) { process.nextTick(cb.bind(this, null, knownLength)); return; } asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { if (err) { cb(err); return; } values.forEach(function(length) { knownLength += length; }); cb(null, knownLength); }); }; FormData.prototype.submit = function(params, cb) { var request , options , defaults = {method: 'post'} ; // parse provided url if it's string // or treat it as options object if (typeof params == 'string') { params = parseUrl(params); options = populate({ port: params.port, path: params.pathname, host: params.hostname, protocol: params.protocol }, defaults); // use custom params } else { options = populate(params, defaults); // if no port provided use default one if (!options.port) { options.port = options.protocol == 'https:' ? 
443 : 80; } } // put that good code in getHeaders to some use options.headers = this.getHeaders(params.headers); // https if specified, fallback to http in any other case if (options.protocol == 'https:') { request = https.request(options); } else { request = http.request(options); } // get content length and fire away this.getLength(function(err, length) { if (err && err !== 'Unknown stream') { this._error(err); return; } // add content length if (length) { request.setHeader('Content-Length', length); } this.pipe(request); if (cb) { var onResponse; var callback = function (error, responce) { request.removeListener('error', callback); request.removeListener('response', onResponse); return cb.call(this, error, responce); }; onResponse = callback.bind(this, null); request.on('error', callback); request.on('response', onResponse); } }.bind(this)); return request; }; FormData.prototype._error = function(err) { if (!this.error) { this.error = err; this.pause(); this.emit('error', err); } }; FormData.prototype.toString = function () { return '[object FormData]'; }; /***/ }), /***/ 7142: /***/ ((module) => { // populates missing values module.exports = function(dst, src) { Object.keys(src).forEach(function(prop) { dst[prop] = dst[prop] || src[prop]; }); return dst; }; /***/ }), /***/ 1585: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {PassThrough: PassThroughStream} = __nccwpck_require__(2781); module.exports = options => { options = {...options}; const {array} = options; let {encoding} = options; const isBuffer = encoding === 'buffer'; let objectMode = false; if (array) { objectMode = !(encoding || isBuffer); } else { encoding = encoding || 'utf8'; } if (isBuffer) { encoding = null; } const stream = new PassThroughStream({objectMode}); if (encoding) { stream.setEncoding(encoding); } let length = 0; const chunks = []; stream.on('data', chunk => { chunks.push(chunk); if (objectMode) { length = chunks.length; } else { length += chunk.length; } }); stream.getBufferedValue = () => { if (array) { return chunks; } return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); }; stream.getBufferedLength = () => length; return stream; }; /***/ }), /***/ 1766: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {constants: BufferConstants} = __nccwpck_require__(4300); const stream = __nccwpck_require__(2781); const {promisify} = __nccwpck_require__(3837); const bufferStream = __nccwpck_require__(1585); const streamPipelinePromisified = promisify(stream.pipeline); class MaxBufferError extends Error { constructor() { super('maxBuffer exceeded'); this.name = 'MaxBufferError'; } } async function getStream(inputStream, options) { if (!inputStream) { throw new Error('Expected a stream'); } options = { maxBuffer: Infinity, ...options }; const {maxBuffer} = options; const stream = bufferStream(options); await new Promise((resolve, reject) => { const rejectPromise = error => { // Don't retrieve an oversized buffer. 
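// Illustrative sketch (not executed here): the getStream() helper defined in this module is
// typically awaited with a maxBuffer so oversized inputs reject with MaxBufferError:
//
//   const getStream = require('get-stream');
//   // (inside an async function)
//   try {
//     const text = await getStream(someReadable, { maxBuffer: 1024 * 1024 });
//   } catch (error) {
//     if (error instanceof getStream.MaxBufferError) { /* input exceeded 1 MiB */ }
//   }
//   // getStream.buffer(stream) and getStream.array(stream) cover Buffer / object-mode inputs.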
if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { error.bufferedData = stream.getBufferedValue(); } reject(error); }; (async () => { try { await streamPipelinePromisified(inputStream, stream); resolve(); } catch (error) { rejectPromise(error); } })(); stream.on('data', () => { if (stream.getBufferedLength() > maxBuffer) { rejectPromise(new MaxBufferError()); } }); }); return stream.getBufferedValue(); } module.exports = getStream; module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); module.exports.MaxBufferError = MaxBufferError; /***/ }), /***/ 1621: /***/ ((module) => { "use strict"; module.exports = (flag, argv = process.argv) => { const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); const position = argv.indexOf(prefix + flag); const terminatorPosition = argv.indexOf('--'); return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); }; /***/ }), /***/ 1002: /***/ ((module) => { "use strict"; // rfc7231 6.1 const statusCodeCacheableByDefault = new Set([ 200, 203, 204, 206, 300, 301, 308, 404, 405, 410, 414, 501, ]); // This implementation does not understand partial responses (206) const understoodStatuses = new Set([ 200, 203, 204, 300, 301, 302, 303, 307, 308, 404, 405, 410, 414, 501, ]); const errorStatusCodes = new Set([ 500, 502, 503, 504, ]); const hopByHopHeaders = { date: true, // included, because we add Age update Date connection: true, 'keep-alive': true, 'proxy-authenticate': true, 'proxy-authorization': true, te: true, trailer: true, 'transfer-encoding': true, upgrade: true, }; const excludedFromRevalidationUpdate = { // Since the old body is reused, it doesn't make sense to change properties of the body 'content-length': true, 'content-encoding': true, 'transfer-encoding': true, 'content-range': true, }; function toNumberOrZero(s) { const n = parseInt(s, 10); return isFinite(n) ? n : 0; } // RFC 5861 function isErrorResponse(response) { // consider undefined response as faulty if(!response) { return true } return errorStatusCodes.has(response.status); } function parseCacheControl(header) { const cc = {}; if (!header) return cc; // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives), // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale const parts = header.trim().split(/,/); for (const part of parts) { const [k, v] = part.split(/=/, 2); cc[k.trim()] = v === undefined ? true : v.trim().replace(/^"|"$/g, ''); } return cc; } function formatCacheControl(cc) { let parts = []; for (const k in cc) { const v = cc[k]; parts.push(v === true ? k : k + '=' + v); } if (!parts.length) { return undefined; } return parts.join(', '); } module.exports = class CachePolicy { constructor( req, res, { shared, cacheHeuristic, immutableMinTimeToLive, ignoreCargoCult, _fromObject, } = {} ) { if (_fromObject) { this._fromObject(_fromObject); return; } if (!res || !res.headers) { throw Error('Response headers missing'); } this._assertRequestHasHeaders(req); this._responseTime = this.now(); this._isShared = shared !== false; this._cacheHeuristic = undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE this._immutableMinTtl = undefined !== immutableMinTimeToLive ? 
immutableMinTimeToLive : 24 * 3600 * 1000; this._status = 'status' in res ? res.status : 200; this._resHeaders = res.headers; this._rescc = parseCacheControl(res.headers['cache-control']); this._method = 'method' in req ? req.method : 'GET'; this._url = req.url; this._host = req.headers.host; this._noAuthorization = !req.headers.authorization; this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used this._reqcc = parseCacheControl(req.headers['cache-control']); // Assume that if someone uses legacy, non-standard uncecessary options they don't understand caching, // so there's no point stricly adhering to the blindly copy&pasted directives. if ( ignoreCargoCult && 'pre-check' in this._rescc && 'post-check' in this._rescc ) { delete this._rescc['pre-check']; delete this._rescc['post-check']; delete this._rescc['no-cache']; delete this._rescc['no-store']; delete this._rescc['must-revalidate']; this._resHeaders = Object.assign({}, this._resHeaders, { 'cache-control': formatCacheControl(this._rescc), }); delete this._resHeaders.expires; delete this._resHeaders.pragma; } // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1). if ( res.headers['cache-control'] == null && /no-cache/.test(res.headers.pragma) ) { this._rescc['no-cache'] = true; } } now() { return Date.now(); } storable() { // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it. return !!( !this._reqcc['no-store'] && // A cache MUST NOT store a response to any request, unless: // The request method is understood by the cache and defined as being cacheable, and ('GET' === this._method || 'HEAD' === this._method || ('POST' === this._method && this._hasExplicitExpiration())) && // the response status code is understood by the cache, and understoodStatuses.has(this._status) && // the "no-store" cache directive does not appear in request or response header fields, and !this._rescc['no-store'] && // the "private" response directive does not appear in the response, if the cache is shared, and (!this._isShared || !this._rescc.private) && // the Authorization header field does not appear in the request, if the cache is shared, (!this._isShared || this._noAuthorization || this._allowsStoringAuthenticated()) && // the response either: // contains an Expires header field, or (this._resHeaders.expires || // contains a max-age response directive, or // contains a s-maxage response directive and the cache is shared, or // contains a public response directive. 
this._rescc['max-age'] || (this._isShared && this._rescc['s-maxage']) || this._rescc.public || // has a status code that is defined as cacheable by default statusCodeCacheableByDefault.has(this._status)) ); } _hasExplicitExpiration() { // 4.2.1 Calculating Freshness Lifetime return ( (this._isShared && this._rescc['s-maxage']) || this._rescc['max-age'] || this._resHeaders.expires ); } _assertRequestHasHeaders(req) { if (!req || !req.headers) { throw Error('Request headers missing'); } } satisfiesWithoutRevalidation(req) { this._assertRequestHasHeaders(req); // When presented with a request, a cache MUST NOT reuse a stored response, unless: // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive, // unless the stored response is successfully validated (Section 4.3), and const requestCC = parseCacheControl(req.headers['cache-control']); if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) { return false; } if (requestCC['max-age'] && this.age() > requestCC['max-age']) { return false; } if ( requestCC['min-fresh'] && this.timeToLive() < 1000 * requestCC['min-fresh'] ) { return false; } // the stored response is either: // fresh, or allowed to be served stale if (this.stale()) { const allowsStale = requestCC['max-stale'] && !this._rescc['must-revalidate'] && (true === requestCC['max-stale'] || requestCC['max-stale'] > this.age() - this.maxAge()); if (!allowsStale) { return false; } } return this._requestMatches(req, false); } _requestMatches(req, allowHeadMethod) { // The presented effective request URI and that of the stored response match, and return ( (!this._url || this._url === req.url) && this._host === req.headers.host && // the request method associated with the stored response allows it to be used for the presented request, and (!req.method || this._method === req.method || (allowHeadMethod && 'HEAD' === req.method)) && // selecting header fields nominated by the stored response (if any) match those presented, and this._varyMatches(req) ); } _allowsStoringAuthenticated() { // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage. return ( this._rescc['must-revalidate'] || this._rescc.public || this._rescc['s-maxage'] ); } _varyMatches(req) { if (!this._resHeaders.vary) { return true; } // A Vary header field-value of "*" always fails to match if (this._resHeaders.vary === '*') { return false; } const fields = this._resHeaders.vary .trim() .toLowerCase() .split(/\s*,\s*/); for (const name of fields) { if (req.headers[name] !== this._reqHeaders[name]) return false; } return true; } _copyWithoutHopByHopHeaders(inHeaders) { const headers = {}; for (const name in inHeaders) { if (hopByHopHeaders[name]) continue; headers[name] = inHeaders[name]; } // 9.1. Connection if (inHeaders.connection) { const tokens = inHeaders.connection.trim().split(/\s*,\s*/); for (const name of tokens) { delete headers[name]; } } if (headers.warning) { const warnings = headers.warning.split(/,/).filter(warning => { return !/^\s*1[0-9][0-9]/.test(warning); }); if (!warnings.length) { delete headers.warning; } else { headers.warning = warnings.join(',').trim(); } } return headers; } responseHeaders() { const headers = this._copyWithoutHopByHopHeaders(this._resHeaders); const age = this.age(); // A cache SHOULD generate 113 warning if it heuristically chose a freshness // lifetime greater than 24 hours and the response's age is greater than 24 hours. 
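// Illustrative sketch (assumption: typical usage of the CachePolicy class exported by this
// module; `cache`, `respond`, and the request/response objects are placeholders): decide
// storability, persist the policy next to the body, and answer later requests from cache only
// when they are satisfied without revalidation.
//
//   const policy = new CachePolicy(request, response, { shared: true });
//   if (policy.storable()) {
//     cache.set(cacheKey, { policy: policy.toObject(), body }, policy.timeToLive()); // hypothetical cache
//   }
//   // later:
//   const stored = cache.get(cacheKey);
//   const revived = CachePolicy.fromObject(stored.policy);
//   if (revived.satisfiesWithoutRevalidation(newRequest)) {
//     respond(revived.responseHeaders(), stored.body); // headers carry an updated Age / Date
//   }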
if ( age > 3600 * 24 && !this._hasExplicitExpiration() && this.maxAge() > 3600 * 24 ) { headers.warning = (headers.warning ? `${headers.warning}, ` : '') + '113 - "rfc7234 5.5.4"'; } headers.age = `${Math.round(age)}`; headers.date = new Date(this.now()).toUTCString(); return headers; } /** * Value of the Date response header or current time if Date was invalid * @return timestamp */ date() { const serverDate = Date.parse(this._resHeaders.date); if (isFinite(serverDate)) { return serverDate; } return this._responseTime; } /** * Value of the Age header, in seconds, updated for the current time. * May be fractional. * * @return Number */ age() { let age = this._ageValue(); const residentTime = (this.now() - this._responseTime) / 1000; return age + residentTime; } _ageValue() { return toNumberOrZero(this._resHeaders.age); } /** * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`. * * For an up-to-date value, see `timeToLive()`. * * @return Number */ maxAge() { if (!this.storable() || this._rescc['no-cache']) { return 0; } // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default // so this implementation requires explicit opt-in via public header if ( this._isShared && (this._resHeaders['set-cookie'] && !this._rescc.public && !this._rescc.immutable) ) { return 0; } if (this._resHeaders.vary === '*') { return 0; } if (this._isShared) { if (this._rescc['proxy-revalidate']) { return 0; } // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field. if (this._rescc['s-maxage']) { return toNumberOrZero(this._rescc['s-maxage']); } } // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field. if (this._rescc['max-age']) { return toNumberOrZero(this._rescc['max-age']); } const defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0; const serverDate = this.date(); if (this._resHeaders.expires) { const expires = Date.parse(this._resHeaders.expires); // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired"). 
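// Illustrative note (hedged): maxAge() above returns freshness in seconds counted from the
// response's Date, while timeToLive() (defined below) converts that into milliseconds from now:
//
//   // response: Date: <now>, Cache-Control: max-age=300
//   policy.maxAge();     // 300         (seconds of freshness)
//   policy.age();        // ~0..300     (grows while the response sits in cache)
//   policy.timeToLive(); // ~300000..0  (ms the cached copy can still be served, absent stale-* directives)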
if (Number.isNaN(expires) || expires < serverDate) { return 0; } return Math.max(defaultMinTtl, (expires - serverDate) / 1000); } if (this._resHeaders['last-modified']) { const lastModified = Date.parse(this._resHeaders['last-modified']); if (isFinite(lastModified) && serverDate > lastModified) { return Math.max( defaultMinTtl, ((serverDate - lastModified) / 1000) * this._cacheHeuristic ); } } return defaultMinTtl; } timeToLive() { const age = this.maxAge() - this.age(); const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']); const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']); return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000; } stale() { return this.maxAge() <= this.age(); } _useStaleIfError() { return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age(); } useStaleWhileRevalidate() { return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age(); } static fromObject(obj) { return new this(undefined, undefined, { _fromObject: obj }); } _fromObject(obj) { if (this._responseTime) throw Error('Reinitialized'); if (!obj || obj.v !== 1) throw Error('Invalid serialization'); this._responseTime = obj.t; this._isShared = obj.sh; this._cacheHeuristic = obj.ch; this._immutableMinTtl = obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000; this._status = obj.st; this._resHeaders = obj.resh; this._rescc = obj.rescc; this._method = obj.m; this._url = obj.u; this._host = obj.h; this._noAuthorization = obj.a; this._reqHeaders = obj.reqh; this._reqcc = obj.reqcc; } toObject() { return { v: 1, t: this._responseTime, sh: this._isShared, ch: this._cacheHeuristic, imm: this._immutableMinTtl, st: this._status, resh: this._resHeaders, rescc: this._rescc, m: this._method, u: this._url, h: this._host, a: this._noAuthorization, reqh: this._reqHeaders, reqcc: this._reqcc, }; } /** * Headers for sending to the origin server to revalidate stale response. * Allows server to return 304 to allow reuse of the previous response. * * Hop by hop headers are always stripped. * Revalidation headers may be added or removed, depending on request. */ revalidationHeaders(incomingReq) { this._assertRequestHasHeaders(incomingReq); const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers); // This implementation does not understand range requests delete headers['if-range']; if (!this._requestMatches(incomingReq, true) || !this.storable()) { // revalidation allowed via HEAD // not for the same resource, or wasn't allowed to be cached anyway delete headers['if-none-match']; delete headers['if-modified-since']; return headers; } /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */ if (this._resHeaders.etag) { headers['if-none-match'] = headers['if-none-match'] ? `${headers['if-none-match']}, ${this._resHeaders.etag}` : this._resHeaders.etag; } // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request. const forbidsWeakValidators = headers['accept-ranges'] || headers['if-match'] || headers['if-unmodified-since'] || (this._method && this._method != 'GET'); /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server. 
Note: This implementation does not understand partial responses (206) */ if (forbidsWeakValidators) { delete headers['if-modified-since']; if (headers['if-none-match']) { const etags = headers['if-none-match'] .split(/,/) .filter(etag => { return !/^\s*W\//.test(etag); }); if (!etags.length) { delete headers['if-none-match']; } else { headers['if-none-match'] = etags.join(',').trim(); } } } else if ( this._resHeaders['last-modified'] && !headers['if-modified-since'] ) { headers['if-modified-since'] = this._resHeaders['last-modified']; } return headers; } /** * Creates new CachePolicy with information combined from the previews response, * and the new revalidation response. * * Returns {policy, modified} where modified is a boolean indicating * whether the response body has been modified, and old cached body can't be used. * * @return {Object} {policy: CachePolicy, modified: Boolean} */ revalidatedPolicy(request, response) { this._assertRequestHasHeaders(request); if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful return { modified: false, matches: false, policy: this, }; } if (!response || !response.headers) { throw Error('Response headers missing'); } // These aren't going to be supported exactly, since one CachePolicy object // doesn't know about all the other cached objects. let matches = false; if (response.status !== undefined && response.status != 304) { matches = false; } else if ( response.headers.etag && !/^\s*W\//.test(response.headers.etag) ) { // "All of the stored responses with the same strong validator are selected. // If none of the stored responses contain the same strong validator, // then the cache MUST NOT use the new response to update any stored responses." matches = this._resHeaders.etag && this._resHeaders.etag.replace(/^\s*W\//, '') === response.headers.etag; } else if (this._resHeaders.etag && response.headers.etag) { // "If the new response contains a weak validator and that validator corresponds // to one of the cache's stored responses, // then the most recent of those matching stored responses is selected for update." matches = this._resHeaders.etag.replace(/^\s*W\//, '') === response.headers.etag.replace(/^\s*W\//, ''); } else if (this._resHeaders['last-modified']) { matches = this._resHeaders['last-modified'] === response.headers['last-modified']; } else { // If the new response does not include any form of validator (such as in the case where // a client generates an If-Modified-Since request from a source other than the Last-Modified // response header field), and there is only one stored response, and that stored response also // lacks a validator, then that stored response is selected for update. if ( !this._resHeaders.etag && !this._resHeaders['last-modified'] && !response.headers.etag && !response.headers['last-modified'] ) { matches = true; } } if (!matches) { return { policy: new this.constructor(request, response), // Client receiving 304 without body, even if it's invalid/mismatched has no option // but to reuse a cached body. We don't have a good way to tell clients to do // error recovery in such case. modified: response.status != 304, matches: false, }; } // use other header fields provided in the 304 (Not Modified) response to replace all instances // of the corresponding header fields in the stored response. const headers = {}; for (const k in this._resHeaders) { headers[k] = k in response.headers && !excludedFromRevalidationUpdate[k] ? 
response.headers[k] : this._resHeaders[k]; } const newResponse = Object.assign({}, response, { status: this._status, method: this._method, headers, }); return { policy: new this.constructor(request, newResponse, { shared: this._isShared, cacheHeuristic: this._cacheHeuristic, immutableMinTimeToLive: this._immutableMinTtl, }), modified: false, matches: true, }; } }; /***/ }), /***/ 9898: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // See https://github.com/facebook/jest/issues/2549 // eslint-disable-next-line node/prefer-global/url const {URL} = __nccwpck_require__(7310); const EventEmitter = __nccwpck_require__(2361); const tls = __nccwpck_require__(4404); const http2 = __nccwpck_require__(5158); const QuickLRU = __nccwpck_require__(9273); const delayAsyncDestroy = __nccwpck_require__(9237); const kCurrentStreamCount = Symbol('currentStreamCount'); const kRequest = Symbol('request'); const kOriginSet = Symbol('cachedOriginSet'); const kGracefullyClosing = Symbol('gracefullyClosing'); const kLength = Symbol('length'); const nameKeys = [ // Not an Agent option actually 'createConnection', // `http2.connect()` options 'maxDeflateDynamicTableSize', 'maxSettings', 'maxSessionMemory', 'maxHeaderListPairs', 'maxOutstandingPings', 'maxReservedRemoteStreams', 'maxSendHeaderBlockLength', 'paddingStrategy', 'peerMaxConcurrentStreams', 'settings', // `tls.connect()` source options 'family', 'localAddress', 'rejectUnauthorized', // `tls.connect()` secure context options 'pskCallback', 'minDHSize', // `tls.connect()` destination options // - `servername` is automatically validated, skip it // - `host` and `port` just describe the destination server, 'path', 'socket', // `tls.createSecureContext()` options 'ca', 'cert', 'sigalgs', 'ciphers', 'clientCertEngine', 'crl', 'dhparam', 'ecdhCurve', 'honorCipherOrder', 'key', 'privateKeyEngine', 'privateKeyIdentifier', 'maxVersion', 'minVersion', 'pfx', 'secureOptions', 'secureProtocol', 'sessionIdContext', 'ticketKeys' ]; const getSortedIndex = (array, value, compare) => { let low = 0; let high = array.length; while (low < high) { const mid = (low + high) >>> 1; if (compare(array[mid], value)) { low = mid + 1; } else { high = mid; } } return low; }; const compareSessions = (a, b) => a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams; // See https://tools.ietf.org/html/rfc8336 const closeCoveredSessions = (where, session) => { // Clients SHOULD NOT emit new requests on any connection whose Origin // Set is a proper subset of another connection's Origin Set, and they // SHOULD close it once all outstanding requests are satisfied. for (let index = 0; index < where.length; index++) { const coveredSession = where[index]; if ( // Unfortunately `.every()` returns true for an empty array coveredSession[kOriginSet].length > 0 // The set is a proper subset when its length is less than the other set. && coveredSession[kOriginSet].length < session[kOriginSet].length // And the other set includes all elements of the subset. && coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) // Makes sure that the session can handle all requests from the covered session. && (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams ) { // This allows pending requests to finish and prevents making new requests. gracefullyClose(coveredSession); } } }; // This is basically inverted `closeCoveredSessions(...)`. 
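// Illustrative example (hedged, comments only – nothing here executes): how the RFC 8336
// coverage check above coalesces sessions. Suppose two connected sessions advertise:
//
//   session A: originSet = ['https://a.example'],                      2 open streams
//   session B: originSet = ['https://a.example', 'https://b.example'], 3 open streams, max 100
//
// A's origin set is a proper subset of B's and B has room for A's streams (2 + 3 <= 100), so
// closeCoveredSessions() marks A as gracefully closing: it finishes its in-flight requests,
// accepts no new ones, and future requests for a.example reuse B.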
const closeSessionIfCovered = (where, coveredSession) => { for (let index = 0; index < where.length; index++) { const session = where[index]; if ( coveredSession[kOriginSet].length > 0 && coveredSession[kOriginSet].length < session[kOriginSet].length && coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) && (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams ) { gracefullyClose(coveredSession); return true; } } return false; }; const gracefullyClose = session => { session[kGracefullyClosing] = true; if (session[kCurrentStreamCount] === 0) { session.close(); } }; class Agent extends EventEmitter { constructor({timeout = 0, maxSessions = Number.POSITIVE_INFINITY, maxEmptySessions = 10, maxCachedTlsSessions = 100} = {}) { super(); // SESSIONS[NORMALIZED_OPTIONS] = []; this.sessions = {}; // The queue for creating new sessions. It looks like this: // QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION // // It's faster when there are many origins. If there's only one, then QUEUE[`${options}:${origin}`] is faster. // I guess object creation / deletion is causing the slowdown. // // The entry function has `listeners`, `completed` and `destroyed` properties. // `listeners` is an array of objects containing `resolve` and `reject` functions. // `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed. // `destroyed` is a boolean. If it's set to true, the session will be destroyed if hasn't connected yet. this.queue = {}; // Each session will use this timeout value. this.timeout = timeout; // Max sessions in total this.maxSessions = maxSessions; // Max empty sessions in total this.maxEmptySessions = maxEmptySessions; this._emptySessionCount = 0; this._sessionCount = 0; // We don't support push streams by default. this.settings = { enablePush: false, initialWindowSize: 1024 * 1024 * 32 // 32MB, see https://github.com/nodejs/node/issues/38426 }; // Reusing TLS sessions increases performance. this.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions}); } get protocol() { return 'https:'; } normalizeOptions(options) { let normalized = ''; for (let index = 0; index < nameKeys.length; index++) { const key = nameKeys[index]; normalized += ':'; if (options && options[key] !== undefined) { normalized += options[key]; } } return normalized; } _processQueue() { if (this._sessionCount >= this.maxSessions) { this.closeEmptySessions(this.maxSessions - this._sessionCount + 1); return; } // eslint-disable-next-line guard-for-in for (const normalizedOptions in this.queue) { // eslint-disable-next-line guard-for-in for (const normalizedOrigin in this.queue[normalizedOptions]) { const item = this.queue[normalizedOptions][normalizedOrigin]; // The entry function can be run only once. if (!item.completed) { item.completed = true; item(); } } } } _isBetterSession(thisStreamCount, thatStreamCount) { return thisStreamCount > thatStreamCount; } _accept(session, listeners, normalizedOrigin, options) { let index = 0; while (index < listeners.length && session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams) { // We assume `resolve(...)` calls `request(...)` *directly*, // otherwise the session will get overloaded. 
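// Each waiting caller receives this session until its concurrent-stream limit is reached;
// whatever remains in `listeners` after this loop is re-queued through getSession() below.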
listeners[index].resolve(session); index++; } listeners.splice(0, index); if (listeners.length > 0) { this.getSession(normalizedOrigin, options, listeners); listeners.length = 0; } } getSession(origin, options, listeners) { return new Promise((resolve, reject) => { if (Array.isArray(listeners) && listeners.length > 0) { listeners = [...listeners]; // Resolve the current promise ASAP, we're just moving the listeners. // They will be executed at a different time. resolve(); } else { listeners = [{resolve, reject}]; } try { // Parse origin if (typeof origin === 'string') { origin = new URL(origin); } else if (!(origin instanceof URL)) { throw new TypeError('The `origin` argument needs to be a string or an URL object'); } if (options) { // Validate servername const {servername} = options; const {hostname} = origin; if (servername && hostname !== servername) { throw new Error(`Origin ${hostname} differs from servername ${servername}`); } } } catch (error) { for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } return; } const normalizedOptions = this.normalizeOptions(options); const normalizedOrigin = origin.origin; if (normalizedOptions in this.sessions) { const sessions = this.sessions[normalizedOptions]; let maxConcurrentStreams = -1; let currentStreamsCount = -1; let optimalSession; // We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal. // Additionally, we are looking for session which has biggest current pending streams count. // // |------------| |------------| |------------| |------------| // | Session: A | | Session: B | | Session: C | | Session: D | // | Pending: 5 |-| Pending: 8 |-| Pending: 9 |-| Pending: 4 | // | Max: 10 | | Max: 10 | | Max: 9 | | Max: 5 | // |------------| |------------| |------------| |------------| // ^ // | // pick this one -- // for (let index = 0; index < sessions.length; index++) { const session = sessions[index]; const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams; if (sessionMaxConcurrentStreams < maxConcurrentStreams) { break; } if (!session[kOriginSet].includes(normalizedOrigin)) { continue; } const sessionCurrentStreamsCount = session[kCurrentStreamCount]; if ( sessionCurrentStreamsCount >= sessionMaxConcurrentStreams || session[kGracefullyClosing] // Unfortunately the `close` event isn't called immediately, // so `session.destroyed` is `true`, but `session.closed` is `false`. || session.destroyed ) { continue; } // We only need set this once. if (!optimalSession) { maxConcurrentStreams = sessionMaxConcurrentStreams; } // Either get the session which has biggest current stream count or the lowest. if (this._isBetterSession(sessionCurrentStreamsCount, currentStreamsCount)) { optimalSession = session; currentStreamsCount = sessionCurrentStreamsCount; } } if (optimalSession) { this._accept(optimalSession, listeners, normalizedOrigin, options); return; } } if (normalizedOptions in this.queue) { if (normalizedOrigin in this.queue[normalizedOptions]) { // There's already an item in the queue, just attach ourselves to it. this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners); return; } } else { this.queue[normalizedOptions] = { [kLength]: 0 }; } // The entry must be removed from the queue IMMEDIATELY when: // 1. the session connects successfully, // 2. an error occurs. const removeFromQueue = () => { // Our entry can be replaced. We cannot remove the new one. 
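// The identity check against `entry` below ensures a newer queue entry that replaced this one
// is never deleted; once the per-options bucket's kLength counter drops to zero, the whole
// bucket is removed as well.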
if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) { delete this.queue[normalizedOptions][normalizedOrigin]; if (--this.queue[normalizedOptions][kLength] === 0) { delete this.queue[normalizedOptions]; } } }; // The main logic is here const entry = async () => { this._sessionCount++; const name = `${normalizedOrigin}:${normalizedOptions}`; let receivedSettings = false; let socket; try { const computedOptions = {...options}; if (computedOptions.settings === undefined) { computedOptions.settings = this.settings; } if (computedOptions.session === undefined) { computedOptions.session = this.tlsSessionCache.get(name); } const createConnection = computedOptions.createConnection || this.createConnection; // A hacky workaround to enable async `createConnection` socket = await createConnection.call(this, origin, computedOptions); computedOptions.createConnection = () => socket; const session = http2.connect(origin, computedOptions); session[kCurrentStreamCount] = 0; session[kGracefullyClosing] = false; // Node.js return https://false:443 instead of https://1.1.1.1:443 const getOriginSet = () => { const {socket} = session; let originSet; if (socket.servername === false) { socket.servername = socket.remoteAddress; originSet = session.originSet; socket.servername = false; } else { originSet = session.originSet; } return originSet; }; const isFree = () => session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams; session.socket.once('session', tlsSession => { this.tlsSessionCache.set(name, tlsSession); }); session.once('error', error => { // Listeners are empty when the session successfully connected. for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } // The connection got broken, purge the cache. this.tlsSessionCache.delete(name); }); session.setTimeout(this.timeout, () => { // Terminates all streams owned by this session. session.destroy(); }); session.once('close', () => { this._sessionCount--; if (receivedSettings) { // Assumes session `close` is emitted after request `close` this._emptySessionCount--; // This cannot be moved to the stream logic, // because there may be a session that hadn't made a single request. const where = this.sessions[normalizedOptions]; if (where.length === 1) { delete this.sessions[normalizedOptions]; } else { where.splice(where.indexOf(session), 1); } } else { // Broken connection removeFromQueue(); const error = new Error('Session closed without receiving a SETTINGS frame'); error.code = 'HTTP2WRAPPER_NOSETTINGS'; for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } } // There may be another session awaiting. this._processQueue(); }); // Iterates over the queue and processes listeners. const processListeners = () => { const queue = this.queue[normalizedOptions]; if (!queue) { return; } const originSet = session[kOriginSet]; for (let index = 0; index < originSet.length; index++) { const origin = originSet[index]; if (origin in queue) { const {listeners, completed} = queue[origin]; let index = 0; // Prevents session overloading. while (index < listeners.length && isFree()) { // We assume `resolve(...)` calls `request(...)` *directly*, // otherwise the session will get overloaded. 
listeners[index].resolve(session); index++; } queue[origin].listeners.splice(0, index); if (queue[origin].listeners.length === 0 && !completed) { delete queue[origin]; if (--queue[kLength] === 0) { delete this.queue[normalizedOptions]; break; } } // We're no longer free, no point in continuing. if (!isFree()) { break; } } } }; // The Origin Set cannot shrink. No need to check if it suddenly became covered by another one. session.on('origin', () => { session[kOriginSet] = getOriginSet() || []; session[kGracefullyClosing] = false; closeSessionIfCovered(this.sessions[normalizedOptions], session); if (session[kGracefullyClosing] || !isFree()) { return; } processListeners(); if (!isFree()) { return; } // Close covered sessions (if possible). closeCoveredSessions(this.sessions[normalizedOptions], session); }); session.once('remoteSettings', () => { // The Agent could have been destroyed already. if (entry.destroyed) { const error = new Error('Agent has been destroyed'); for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } session.destroy(); return; } // See https://github.com/nodejs/node/issues/38426 if (session.setLocalWindowSize) { session.setLocalWindowSize(1024 * 1024 * 4); // 4 MB } session[kOriginSet] = getOriginSet() || []; if (session.socket.encrypted) { const mainOrigin = session[kOriginSet][0]; if (mainOrigin !== normalizedOrigin) { const error = new Error(`Requested origin ${normalizedOrigin} does not match server ${mainOrigin}`); for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } session.destroy(); return; } } removeFromQueue(); { const where = this.sessions; if (normalizedOptions in where) { const sessions = where[normalizedOptions]; sessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session); } else { where[normalizedOptions] = [session]; } } receivedSettings = true; this._emptySessionCount++; this.emit('session', session); this._accept(session, listeners, normalizedOrigin, options); if (session[kCurrentStreamCount] === 0 && this._emptySessionCount > this.maxEmptySessions) { this.closeEmptySessions(this._emptySessionCount - this.maxEmptySessions); } // `session.remoteSettings.maxConcurrentStreams` might get increased session.on('remoteSettings', () => { if (!isFree()) { return; } processListeners(); if (!isFree()) { return; } // In case the Origin Set changes closeCoveredSessions(this.sessions[normalizedOptions], session); }); }); // Shim `session.request()` in order to catch all streams session[kRequest] = session.request; session.request = (headers, streamOptions) => { if (session[kGracefullyClosing]) { throw new Error('The session is gracefully closing. No new streams are allowed.'); } const stream = session[kRequest](headers, streamOptions); // The process won't exit until the session is closed or all requests are gone. 
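// Bookkeeping sketch (descriptive comment only): session.ref() keeps the process alive while
// the session has open streams. The first stream opened on a previously empty session
// decrements _emptySessionCount; when the stream count drops back to zero in the 'close'
// handler below, the session is unref()ed and counted as empty again, and it is closed
// outright if the empty-session limit (maxEmptySessions) is exceeded or the session is
// gracefully closing.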
session.ref(); if (session[kCurrentStreamCount]++ === 0) { this._emptySessionCount--; } stream.once('close', () => { if (--session[kCurrentStreamCount] === 0) { this._emptySessionCount++; session.unref(); if (this._emptySessionCount > this.maxEmptySessions || session[kGracefullyClosing]) { session.close(); return; } } if (session.destroyed || session.closed) { return; } if (isFree() && !closeSessionIfCovered(this.sessions[normalizedOptions], session)) { closeCoveredSessions(this.sessions[normalizedOptions], session); processListeners(); if (session[kCurrentStreamCount] === 0) { this._processQueue(); } } }); return stream; }; } catch (error) { removeFromQueue(); this._sessionCount--; for (let index = 0; index < listeners.length; index++) { listeners[index].reject(error); } } }; entry.listeners = listeners; entry.completed = false; entry.destroyed = false; this.queue[normalizedOptions][normalizedOrigin] = entry; this.queue[normalizedOptions][kLength]++; this._processQueue(); }); } request(origin, options, headers, streamOptions) { return new Promise((resolve, reject) => { this.getSession(origin, options, [{ reject, resolve: session => { try { const stream = session.request(headers, streamOptions); // Do not throw before `request(...)` has been awaited delayAsyncDestroy(stream); resolve(stream); } catch (error) { reject(error); } } }]); }); } async createConnection(origin, options) { return Agent.connect(origin, options); } static connect(origin, options) { options.ALPNProtocols = ['h2']; const port = origin.port || 443; const host = origin.hostname; if (typeof options.servername === 'undefined') { options.servername = host; } const socket = tls.connect(port, host, options); if (options.socket) { socket._peername = { family: undefined, address: undefined, port }; } return socket; } closeEmptySessions(maxCount = Number.POSITIVE_INFINITY) { let closedCount = 0; const {sessions} = this; // eslint-disable-next-line guard-for-in for (const key in sessions) { const thisSessions = sessions[key]; for (let index = 0; index < thisSessions.length; index++) { const session = thisSessions[index]; if (session[kCurrentStreamCount] === 0) { closedCount++; session.close(); if (closedCount >= maxCount) { return closedCount; } } } } return closedCount; } destroy(reason) { const {sessions, queue} = this; // eslint-disable-next-line guard-for-in for (const key in sessions) { const thisSessions = sessions[key]; for (let index = 0; index < thisSessions.length; index++) { thisSessions[index].destroy(reason); } } // eslint-disable-next-line guard-for-in for (const normalizedOptions in queue) { const entries = queue[normalizedOptions]; // eslint-disable-next-line guard-for-in for (const normalizedOrigin in entries) { entries[normalizedOrigin].destroyed = true; } } // New requests should NOT attach to destroyed sessions this.queue = {}; this.tlsSessionCache.clear(); } get emptySessionCount() { return this._emptySessionCount; } get pendingSessionCount() { return this._sessionCount - this._emptySessionCount; } get sessionCount() { return this._sessionCount; } } Agent.kCurrentStreamCount = kCurrentStreamCount; Agent.kGracefullyClosing = kGracefullyClosing; module.exports = { Agent, globalAgent: new Agent() }; /***/ }), /***/ 7167: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // See https://github.com/facebook/jest/issues/2549 // eslint-disable-next-line node/prefer-global/url const {URL, urlToHttpOptions} = __nccwpck_require__(7310); const http = __nccwpck_require__(3685); const https 
= __nccwpck_require__(5687); const resolveALPN = __nccwpck_require__(6624); const QuickLRU = __nccwpck_require__(9273); const {Agent, globalAgent} = __nccwpck_require__(9898); const Http2ClientRequest = __nccwpck_require__(9632); const calculateServerName = __nccwpck_require__(1982); const delayAsyncDestroy = __nccwpck_require__(9237); const cache = new QuickLRU({maxSize: 100}); const queue = new Map(); const installSocket = (agent, socket, options) => { socket._httpMessage = {shouldKeepAlive: true}; const onFree = () => { agent.emit('free', socket, options); }; socket.on('free', onFree); const onClose = () => { agent.removeSocket(socket, options); }; socket.on('close', onClose); const onTimeout = () => { const {freeSockets} = agent; for (const sockets of Object.values(freeSockets)) { if (sockets.includes(socket)) { socket.destroy(); return; } } }; socket.on('timeout', onTimeout); const onRemove = () => { agent.removeSocket(socket, options); socket.off('close', onClose); socket.off('free', onFree); socket.off('timeout', onTimeout); socket.off('agentRemove', onRemove); }; socket.on('agentRemove', onRemove); agent.emit('free', socket, options); }; const createResolveProtocol = (cache, queue = new Map(), connect = undefined) => { return async options => { const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`; if (!cache.has(name)) { if (queue.has(name)) { const result = await queue.get(name); return {alpnProtocol: result.alpnProtocol}; } const {path} = options; options.path = options.socketPath; const resultPromise = resolveALPN(options, connect); queue.set(name, resultPromise); try { const result = await resultPromise; cache.set(name, result.alpnProtocol); queue.delete(name); options.path = path; return result; } catch (error) { queue.delete(name); options.path = path; throw error; } } return {alpnProtocol: cache.get(name)}; }; }; const defaultResolveProtocol = createResolveProtocol(cache, queue); module.exports = async (input, options, callback) => { if (typeof input === 'string') { input = urlToHttpOptions(new URL(input)); } else if (input instanceof URL) { input = urlToHttpOptions(input); } else { input = {...input}; } if (typeof options === 'function' || options === undefined) { // (options, callback) callback = options; options = input; } else { // (input, options, callback) options = Object.assign(input, options); } options.ALPNProtocols = options.ALPNProtocols || ['h2', 'http/1.1']; if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) { throw new Error('The `ALPNProtocols` option must be an Array with at least one entry'); } options.protocol = options.protocol || 'https:'; const isHttps = options.protocol === 'https:'; options.host = options.hostname || options.host || 'localhost'; options.session = options.tlsSession; options.servername = options.servername || calculateServerName((options.headers && options.headers.host) || options.host); options.port = options.port || (isHttps ? 443 : 80); options._defaultAgent = isHttps ? 
https.globalAgent : http.globalAgent; const resolveProtocol = options.resolveProtocol || defaultResolveProtocol; // Note: We don't support `h2session` here let {agent} = options; if (agent !== undefined && agent !== false && agent.constructor.name !== 'Object') { throw new Error('The `options.agent` can be only an object `http`, `https` or `http2` properties'); } if (isHttps) { options.resolveSocket = true; let {socket, alpnProtocol, timeout} = await resolveProtocol(options); if (timeout) { if (socket) { socket.destroy(); } const error = new Error(`Timed out resolving ALPN: ${options.timeout} ms`); error.code = 'ETIMEDOUT'; error.ms = options.timeout; throw error; } // We can't accept custom `createConnection` because the API is different for HTTP/2 if (socket && options.createConnection) { socket.destroy(); socket = undefined; } delete options.resolveSocket; const isHttp2 = alpnProtocol === 'h2'; if (agent) { agent = isHttp2 ? agent.http2 : agent.https; options.agent = agent; } if (agent === undefined) { agent = isHttp2 ? globalAgent : https.globalAgent; } if (socket) { if (agent === false) { socket.destroy(); } else { const defaultCreateConnection = (isHttp2 ? Agent : https.Agent).prototype.createConnection; if (agent.createConnection === defaultCreateConnection) { if (isHttp2) { options._reuseSocket = socket; } else { installSocket(agent, socket, options); } } else { socket.destroy(); } } } if (isHttp2) { return delayAsyncDestroy(new Http2ClientRequest(options, callback)); } } else if (agent) { options.agent = agent.http; } // If we're sending HTTP/1.1, handle any explicitly set H2 headers in the options: if (options.headers) { options.headers = {...options.headers}; // :authority is equivalent to the HTTP/1.1 host header if (options.headers[':authority']) { if (!options.headers.host) { options.headers.host = options.headers[':authority']; } delete options.headers[':authority']; } // Remove other HTTP/2 headers as they have their counterparts in the options delete options.headers[':method']; delete options.headers[':scheme']; delete options.headers[':path']; } return delayAsyncDestroy(http.request(options, callback)); }; module.exports.protocolCache = cache; module.exports.resolveProtocol = defaultResolveProtocol; module.exports.createResolveProtocol = createResolveProtocol; /***/ }), /***/ 9632: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // See https://github.com/facebook/jest/issues/2549 // eslint-disable-next-line node/prefer-global/url const {URL, urlToHttpOptions} = __nccwpck_require__(7310); const http2 = __nccwpck_require__(5158); const {Writable} = __nccwpck_require__(2781); const {Agent, globalAgent} = __nccwpck_require__(9898); const IncomingMessage = __nccwpck_require__(2575); const proxyEvents = __nccwpck_require__(1818); const { ERR_INVALID_ARG_TYPE, ERR_INVALID_PROTOCOL, ERR_HTTP_HEADERS_SENT } = __nccwpck_require__(7087); const validateHeaderName = __nccwpck_require__(4592); const validateHeaderValue = __nccwpck_require__(3549); const proxySocketHandler = __nccwpck_require__(9404); const { HTTP2_HEADER_STATUS, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, HTTP2_HEADER_AUTHORITY, HTTP2_METHOD_CONNECT } = http2.constants; const kHeaders = Symbol('headers'); const kOrigin = Symbol('origin'); const kSession = Symbol('session'); const kOptions = Symbol('options'); const kFlushedHeaders = Symbol('flushedHeaders'); const kJobs = Symbol('jobs'); const kPendingAgentPromise = Symbol('pendingAgentPromise'); class ClientRequest extends Writable { 
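// Usage sketch for this ClientRequest (hypothetical caller code, kept as a comment so the
// bundled module is unchanged; the 'http2-wrapper' specifier is an assumption about the
// source package name). The request/response flow mirrors Node's http.request API:
//
//   const h2 = require('http2-wrapper');
//   const req = h2.request('https://example.com', { method: 'GET' }, response => {
//     response.on('data', chunk => process.stdout.write(chunk));
//     response.once('end', () => console.log('done'));
//   });
//   req.once('error', console.error);
//   req.end();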
constructor(input, options, callback) { super({ autoDestroy: false, emitClose: false }); if (typeof input === 'string') { input = urlToHttpOptions(new URL(input)); } else if (input instanceof URL) { input = urlToHttpOptions(input); } else { input = {...input}; } if (typeof options === 'function' || options === undefined) { // (options, callback) callback = options; options = input; } else { // (input, options, callback) options = Object.assign(input, options); } if (options.h2session) { this[kSession] = options.h2session; if (this[kSession].destroyed) { throw new Error('The session has been closed already'); } this.protocol = this[kSession].socket.encrypted ? 'https:' : 'http:'; } else if (options.agent === false) { this.agent = new Agent({maxEmptySessions: 0}); } else if (typeof options.agent === 'undefined' || options.agent === null) { this.agent = globalAgent; } else if (typeof options.agent.request === 'function') { this.agent = options.agent; } else { throw new ERR_INVALID_ARG_TYPE('options.agent', ['http2wrapper.Agent-like Object', 'undefined', 'false'], options.agent); } if (this.agent) { this.protocol = this.agent.protocol; } if (options.protocol && options.protocol !== this.protocol) { throw new ERR_INVALID_PROTOCOL(options.protocol, this.protocol); } if (!options.port) { options.port = options.defaultPort || (this.agent && this.agent.defaultPort) || 443; } options.host = options.hostname || options.host || 'localhost'; // Unused delete options.hostname; const {timeout} = options; options.timeout = undefined; this[kHeaders] = Object.create(null); this[kJobs] = []; this[kPendingAgentPromise] = undefined; this.socket = null; this.connection = null; this.method = options.method || 'GET'; if (!(this.method === 'CONNECT' && (options.path === '/' || options.path === undefined))) { this.path = options.path; } this.res = null; this.aborted = false; this.reusedSocket = false; const {headers} = options; if (headers) { // eslint-disable-next-line guard-for-in for (const header in headers) { this.setHeader(header, headers[header]); } } if (options.auth && !('authorization' in this[kHeaders])) { this[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64'); } options.session = options.tlsSession; options.path = options.socketPath; this[kOptions] = options; // Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field. this[kOrigin] = new URL(`${this.protocol}//${options.servername || options.host}:${options.port}`); // A socket is being reused const reuseSocket = options._reuseSocket; if (reuseSocket) { options.createConnection = (...args) => { if (reuseSocket.destroyed) { return this.agent.createConnection(...args); } return reuseSocket; }; // eslint-disable-next-line promise/prefer-await-to-then this.agent.getSession(this[kOrigin], this[kOptions]).catch(() => {}); } if (timeout) { this.setTimeout(timeout); } if (callback) { this.once('response', callback); } this[kFlushedHeaders] = false; } get method() { return this[kHeaders][HTTP2_HEADER_METHOD]; } set method(value) { if (value) { this[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase(); } } get path() { const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH; return this[kHeaders][header]; } set path(value) { if (value) { const header = this.method === 'CONNECT' ? 
HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH; this[kHeaders][header] = value; } } get host() { return this[kOrigin].hostname; } set host(_value) { // Do nothing as this is read only. } get _mustNotHaveABody() { return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE'; } _write(chunk, encoding, callback) { // https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156 if (this._mustNotHaveABody) { callback(new Error('The GET, HEAD and DELETE methods must NOT have a body')); /* istanbul ignore next: Node.js 12 throws directly */ return; } this.flushHeaders(); const callWrite = () => this._request.write(chunk, encoding, callback); if (this._request) { callWrite(); } else { this[kJobs].push(callWrite); } } _final(callback) { this.flushHeaders(); const callEnd = () => { // For GET, HEAD and DELETE and CONNECT if (this._mustNotHaveABody || this.method === 'CONNECT') { callback(); return; } this._request.end(callback); }; if (this._request) { callEnd(); } else { this[kJobs].push(callEnd); } } abort() { if (this.res && this.res.complete) { return; } if (!this.aborted) { process.nextTick(() => this.emit('abort')); } this.aborted = true; this.destroy(); } async _destroy(error, callback) { if (this.res) { this.res._dump(); } if (this._request) { this._request.destroy(); } else { process.nextTick(() => { this.emit('close'); }); } try { await this[kPendingAgentPromise]; } catch (internalError) { if (this.aborted) { error = internalError; } } callback(error); } async flushHeaders() { if (this[kFlushedHeaders] || this.destroyed) { return; } this[kFlushedHeaders] = true; const isConnectMethod = this.method === HTTP2_METHOD_CONNECT; // The real magic is here const onStream = stream => { this._request = stream; if (this.destroyed) { stream.destroy(); return; } // Forwards `timeout`, `continue`, `close` and `error` events to this instance. if (!isConnectMethod) { // TODO: Should we proxy `close` here? proxyEvents(stream, this, ['timeout', 'continue']); } stream.once('error', error => { this.destroy(error); }); stream.once('aborted', () => { const {res} = this; if (res) { res.aborted = true; res.emit('aborted'); res.destroy(); } else { this.destroy(new Error('The server aborted the HTTP/2 stream')); } }); const onResponse = (headers, flags, rawHeaders) => { // If we were to emit raw request stream, it would be as fast as the native approach. // Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it). const response = new IncomingMessage(this.socket, stream.readableHighWaterMark); this.res = response; // Undocumented, but it is used by `cacheable-request` response.url = `${this[kOrigin].origin}${this.path}`; response.req = this; response.statusCode = headers[HTTP2_HEADER_STATUS]; response.headers = headers; response.rawHeaders = rawHeaders; response.once('end', () => { response.complete = true; // Has no effect, just be consistent with the Node.js behavior response.socket = null; response.connection = null; }); if (isConnectMethod) { response.upgrade = true; // The HTTP1 API says the socket is detached here, // but we can't do that so we pass the original HTTP2 request. if (this.emit('connect', response, stream, Buffer.alloc(0))) { this.emit('close'); } else { // No listeners attached, destroy the original request. 
stream.destroy(); } } else { // Forwards data stream.on('data', chunk => { if (!response._dumped && !response.push(chunk)) { stream.pause(); } }); stream.once('end', () => { if (!this.aborted) { response.push(null); } }); if (!this.emit('response', response)) { // No listeners attached, dump the response. response._dump(); } } }; // This event tells we are ready to listen for the data. stream.once('response', onResponse); // Emits `information` event stream.once('headers', headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]})); stream.once('trailers', (trailers, flags, rawTrailers) => { const {res} = this; // https://github.com/nodejs/node/issues/41251 if (res === null) { onResponse(trailers, flags, rawTrailers); return; } // Assigns trailers to the response object. res.trailers = trailers; res.rawTrailers = rawTrailers; }); stream.once('close', () => { const {aborted, res} = this; if (res) { if (aborted) { res.aborted = true; res.emit('aborted'); res.destroy(); } const finish = () => { res.emit('close'); this.destroy(); this.emit('close'); }; if (res.readable) { res.once('end', finish); } else { finish(); } return; } if (!this.destroyed) { this.destroy(new Error('The HTTP/2 stream has been early terminated')); this.emit('close'); return; } this.destroy(); this.emit('close'); }); this.socket = new Proxy(stream, proxySocketHandler); for (const job of this[kJobs]) { job(); } this[kJobs].length = 0; this.emit('socket', this.socket); }; if (!(HTTP2_HEADER_AUTHORITY in this[kHeaders]) && !isConnectMethod) { this[kHeaders][HTTP2_HEADER_AUTHORITY] = this[kOrigin].host; } // Makes a HTTP2 request if (this[kSession]) { try { onStream(this[kSession].request(this[kHeaders])); } catch (error) { this.destroy(error); } } else { this.reusedSocket = true; try { const promise = this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]); this[kPendingAgentPromise] = promise; onStream(await promise); this[kPendingAgentPromise] = false; } catch (error) { this[kPendingAgentPromise] = false; this.destroy(error); } } } get connection() { return this.socket; } set connection(value) { this.socket = value; } getHeaderNames() { return Object.keys(this[kHeaders]); } hasHeader(name) { if (typeof name !== 'string') { throw new ERR_INVALID_ARG_TYPE('name', 'string', name); } return Boolean(this[kHeaders][name.toLowerCase()]); } getHeader(name) { if (typeof name !== 'string') { throw new ERR_INVALID_ARG_TYPE('name', 'string', name); } return this[kHeaders][name.toLowerCase()]; } get headersSent() { return this[kFlushedHeaders]; } removeHeader(name) { if (typeof name !== 'string') { throw new ERR_INVALID_ARG_TYPE('name', 'string', name); } if (this.headersSent) { throw new ERR_HTTP_HEADERS_SENT('remove'); } delete this[kHeaders][name.toLowerCase()]; } setHeader(name, value) { if (this.headersSent) { throw new ERR_HTTP_HEADERS_SENT('set'); } validateHeaderName(name); validateHeaderValue(name, value); const lowercased = name.toLowerCase(); if (lowercased === 'connection') { if (value.toLowerCase() === 'keep-alive') { return; } throw new Error(`Invalid 'connection' header: ${value}`); } if (lowercased === 'host' && this.method === 'CONNECT') { this[kHeaders][HTTP2_HEADER_AUTHORITY] = value; } else { this[kHeaders][lowercased] = value; } } setNoDelay() { // HTTP2 sockets cannot be malformed, do nothing. } setSocketKeepAlive() { // HTTP2 sockets cannot be malformed, do nothing. 
} setTimeout(ms, callback) { const applyTimeout = () => this._request.setTimeout(ms, callback); if (this._request) { applyTimeout(); } else { this[kJobs].push(applyTimeout); } return this; } get maxHeadersCount() { if (!this.destroyed && this._request) { return this._request.session.localSettings.maxHeaderListSize; } return undefined; } set maxHeadersCount(_value) { // Updating HTTP2 settings would affect all requests, do nothing. } } module.exports = ClientRequest; /***/ }), /***/ 2575: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {Readable} = __nccwpck_require__(2781); class IncomingMessage extends Readable { constructor(socket, highWaterMark) { super({ emitClose: false, autoDestroy: true, highWaterMark }); this.statusCode = null; this.statusMessage = ''; this.httpVersion = '2.0'; this.httpVersionMajor = 2; this.httpVersionMinor = 0; this.headers = {}; this.trailers = {}; this.req = null; this.aborted = false; this.complete = false; this.upgrade = null; this.rawHeaders = []; this.rawTrailers = []; this.socket = socket; this._dumped = false; } get connection() { return this.socket; } set connection(value) { this.socket = value; } _destroy(error, callback) { if (!this.readableEnded) { this.aborted = true; } // See https://github.com/nodejs/node/issues/35303 callback(); this.req._request.destroy(error); } setTimeout(ms, callback) { this.req.setTimeout(ms, callback); return this; } _dump() { if (!this._dumped) { this._dumped = true; this.removeAllListeners('data'); this.resume(); } } _read() { if (this.req) { this.req._request.resume(); } } } module.exports = IncomingMessage; /***/ }), /***/ 4645: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const http2 = __nccwpck_require__(5158); const { Agent, globalAgent } = __nccwpck_require__(9898); const ClientRequest = __nccwpck_require__(9632); const IncomingMessage = __nccwpck_require__(2575); const auto = __nccwpck_require__(7167); const { HttpOverHttp2, HttpsOverHttp2 } = __nccwpck_require__(8795); const Http2OverHttp2 = __nccwpck_require__(8553); const { Http2OverHttp, Http2OverHttps } = __nccwpck_require__(9794); const validateHeaderName = __nccwpck_require__(4592); const validateHeaderValue = __nccwpck_require__(3549); const request = (url, options, callback) => new ClientRequest(url, options, callback); const get = (url, options, callback) => { // eslint-disable-next-line unicorn/prevent-abbreviations const req = new ClientRequest(url, options, callback); req.end(); return req; }; module.exports = { ...http2, ClientRequest, IncomingMessage, Agent, globalAgent, request, get, auto, proxies: { HttpOverHttp2, HttpsOverHttp2, Http2OverHttp2, Http2OverHttp, Http2OverHttps }, validateHeaderName, validateHeaderValue }; /***/ }), /***/ 7885: /***/ ((module) => { "use strict"; module.exports = self => { const {username, password} = self.proxyOptions.url; if (username || password) { const data = `${username}:${password}`; const authorization = `Basic ${Buffer.from(data).toString('base64')}`; return { 'proxy-authorization': authorization, authorization }; } return {}; }; /***/ }), /***/ 8795: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const tls = __nccwpck_require__(4404); const http = __nccwpck_require__(3685); const https = __nccwpck_require__(5687); const JSStreamSocket = __nccwpck_require__(1564); const {globalAgent} = __nccwpck_require__(9898); const UnexpectedStatusCodeError = __nccwpck_require__(6203); const initialize = 
__nccwpck_require__(1089); const getAuthorizationHeaders = __nccwpck_require__(7885); const createConnection = (self, options, callback) => { (async () => { try { const {proxyOptions} = self; const {url, headers, raw} = proxyOptions; const stream = await globalAgent.request(url, proxyOptions, { ...getAuthorizationHeaders(self), ...headers, ':method': 'CONNECT', ':authority': `${options.host}:${options.port}` }); stream.once('error', callback); stream.once('response', headers => { const statusCode = headers[':status']; if (statusCode !== 200) { callback(new UnexpectedStatusCodeError(statusCode, '')); return; } const encrypted = self instanceof https.Agent; if (raw && encrypted) { options.socket = stream; const secureStream = tls.connect(options); secureStream.once('close', () => { stream.destroy(); }); callback(null, secureStream); return; } const socket = new JSStreamSocket(stream); socket.encrypted = false; socket._handle.getpeername = out => { out.family = undefined; out.address = undefined; out.port = undefined; }; callback(null, socket); }); } catch (error) { callback(error); } })(); }; class HttpOverHttp2 extends http.Agent { constructor(options) { super(options); initialize(this, options.proxyOptions); } createConnection(options, callback) { createConnection(this, options, callback); } } class HttpsOverHttp2 extends https.Agent { constructor(options) { super(options); initialize(this, options.proxyOptions); } createConnection(options, callback) { createConnection(this, options, callback); } } module.exports = { HttpOverHttp2, HttpsOverHttp2 }; /***/ }), /***/ 9794: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const http = __nccwpck_require__(3685); const https = __nccwpck_require__(5687); const Http2OverHttpX = __nccwpck_require__(1857); const getAuthorizationHeaders = __nccwpck_require__(7885); const getStream = request => new Promise((resolve, reject) => { const onConnect = (response, socket, head) => { socket.unshift(head); request.off('error', reject); resolve([socket, response.statusCode, response.statusMessage]); }; request.once('error', reject); request.once('connect', onConnect); }); class Http2OverHttp extends Http2OverHttpX { async _getProxyStream(authority) { const {proxyOptions} = this; const {url, headers} = this.proxyOptions; const network = url.protocol === 'https:' ? https : http; // `new URL('https://localhost/httpbin.org:443')` results in // a `/httpbin.org:443` path, which has an invalid leading slash. 
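// Hence the CONNECT request below passes the raw `host:port` authority string directly as the
// request path and as the `host` header, rather than deriving it from a URL instance.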
const request = network.request({ ...proxyOptions, hostname: url.hostname, port: url.port, path: authority, headers: { ...getAuthorizationHeaders(this), ...headers, host: authority }, method: 'CONNECT' }).end(); return getStream(request); } } module.exports = { Http2OverHttp, Http2OverHttps: Http2OverHttp }; /***/ }), /***/ 8553: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {globalAgent} = __nccwpck_require__(9898); const Http2OverHttpX = __nccwpck_require__(1857); const getAuthorizationHeaders = __nccwpck_require__(7885); const getStatusCode = stream => new Promise((resolve, reject) => { stream.once('error', reject); stream.once('response', headers => { stream.off('error', reject); resolve(headers[':status']); }); }); class Http2OverHttp2 extends Http2OverHttpX { async _getProxyStream(authority) { const {proxyOptions} = this; const headers = { ...getAuthorizationHeaders(this), ...proxyOptions.headers, ':method': 'CONNECT', ':authority': authority }; const stream = await globalAgent.request(proxyOptions.url, proxyOptions, headers); const statusCode = await getStatusCode(stream); return [stream, statusCode, '']; } } module.exports = Http2OverHttp2; /***/ }), /***/ 1857: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {Agent} = __nccwpck_require__(9898); const JSStreamSocket = __nccwpck_require__(1564); const UnexpectedStatusCodeError = __nccwpck_require__(6203); const initialize = __nccwpck_require__(1089); class Http2OverHttpX extends Agent { constructor(options) { super(options); initialize(this, options.proxyOptions); } async createConnection(origin, options) { const authority = `${origin.hostname}:${origin.port || 443}`; const [stream, statusCode, statusMessage] = await this._getProxyStream(authority); if (statusCode !== 200) { throw new UnexpectedStatusCodeError(statusCode, statusMessage); } if (this.proxyOptions.raw) { options.socket = stream; } else { const socket = new JSStreamSocket(stream); socket.encrypted = false; socket._handle.getpeername = out => { out.family = undefined; out.address = undefined; out.port = undefined; }; return socket; } return super.createConnection(origin, options); } } module.exports = Http2OverHttpX; /***/ }), /***/ 1089: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // See https://github.com/facebook/jest/issues/2549 // eslint-disable-next-line node/prefer-global/url const {URL} = __nccwpck_require__(7310); const checkType = __nccwpck_require__(3453); module.exports = (self, proxyOptions) => { checkType('proxyOptions', proxyOptions, ['object']); checkType('proxyOptions.headers', proxyOptions.headers, ['object', 'undefined']); checkType('proxyOptions.raw', proxyOptions.raw, ['boolean', 'undefined']); checkType('proxyOptions.url', proxyOptions.url, [URL, 'string']); const url = new URL(proxyOptions.url); self.proxyOptions = { raw: true, ...proxyOptions, headers: {...proxyOptions.headers}, url }; }; /***/ }), /***/ 6203: /***/ ((module) => { "use strict"; class UnexpectedStatusCodeError extends Error { constructor(statusCode, statusMessage = '') { super(`The proxy server rejected the request with status code ${statusCode} (${statusMessage || 'empty status message'})`); this.statusCode = statusCode; this.statusMessage = statusMessage; } } module.exports = UnexpectedStatusCodeError; /***/ }), /***/ 1982: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {isIP} = __nccwpck_require__(1808); const assert = 
__nccwpck_require__(9491); const getHost = host => { if (host[0] === '[') { const idx = host.indexOf(']'); assert(idx !== -1); return host.slice(1, idx); } const idx = host.indexOf(':'); if (idx === -1) { return host; } return host.slice(0, idx); }; module.exports = host => { const servername = getHost(host); if (isIP(servername)) { return ''; } return servername; }; /***/ }), /***/ 3453: /***/ ((module) => { "use strict"; const checkType = (name, value, types) => { const valid = types.some(type => { const typeofType = typeof type; if (typeofType === 'string') { return typeof value === type; } return value instanceof type; }); if (!valid) { const names = types.map(type => typeof type === 'string' ? type : type.name); throw new TypeError(`Expected '${name}' to be a type of ${names.join(' or ')}, got ${typeof value}`); } }; module.exports = checkType; /***/ }), /***/ 9237: /***/ ((module) => { "use strict"; module.exports = stream => { if (stream.listenerCount('error') !== 0) { return stream; } stream.__destroy = stream._destroy; stream._destroy = (...args) => { const callback = args.pop(); stream.__destroy(...args, async error => { await Promise.resolve(); callback(error); }); }; const onError = error => { // eslint-disable-next-line promise/prefer-await-to-then Promise.resolve().then(() => { stream.emit('error', error); }); }; stream.once('error', onError); // eslint-disable-next-line promise/prefer-await-to-then Promise.resolve().then(() => { stream.off('error', onError); }); return stream; }; /***/ }), /***/ 7087: /***/ ((module) => { "use strict"; /* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */ const makeError = (Base, key, getMessage) => { module.exports[key] = class NodeError extends Base { constructor(...args) { super(typeof getMessage === 'string' ? getMessage : getMessage(args)); this.name = `${super.name} [${key}]`; this.code = key; } }; }; makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => { const type = args[0].includes('.') ? 'property' : 'argument'; let valid = args[1]; const isManyTypes = Array.isArray(valid); if (isManyTypes) { valid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`; } return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`; }); makeError(TypeError, 'ERR_INVALID_PROTOCOL', args => `Protocol "${args[0]}" not supported. Expected "${args[1]}"` ); makeError(Error, 'ERR_HTTP_HEADERS_SENT', args => `Cannot ${args[0]} headers after they are sent to the client` ); makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => `${args[0]} must be a valid HTTP token [${args[1]}]` ); makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => `Invalid value "${args[0]} for header "${args[1]}"` ); makeError(TypeError, 'ERR_INVALID_CHAR', args => `Invalid character in ${args[0]} [${args[1]}]` ); makeError( Error, 'ERR_HTTP2_NO_SOCKET_MANIPULATION', 'HTTP/2 sockets should not be directly manipulated (e.g. read and written)' ); /***/ }), /***/ 1199: /***/ ((module) => { "use strict"; module.exports = header => { switch (header) { case ':method': case ':scheme': case ':authority': case ':path': return true; default: return false; } }; /***/ }), /***/ 1564: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const stream = __nccwpck_require__(2781); const tls = __nccwpck_require__(4404); // Really awesome hack. 
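// What the "hack" below does (descriptive comment): constructing a TLSSocket over a plain
// PassThrough makes Node wrap the stream in its internal JSStreamSocket class, whose
// constructor is then recovered via _handle._parentWrap. The proxy agents above use that
// class to make an HTTP/2 CONNECT stream behave like a regular net.Socket.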
const JSStreamSocket = (new tls.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor; module.exports = JSStreamSocket; /***/ }), /***/ 1818: /***/ ((module) => { "use strict"; module.exports = (from, to, events) => { for (const event of events) { from.on(event, (...args) => to.emit(event, ...args)); } }; /***/ }), /***/ 9404: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {ERR_HTTP2_NO_SOCKET_MANIPULATION} = __nccwpck_require__(7087); /* istanbul ignore file */ /* https://github.com/nodejs/node/blob/6eec858f34a40ffa489c1ec54bb24da72a28c781/lib/internal/http2/compat.js#L195-L272 */ const proxySocketHandler = { has(stream, property) { // Replaced [kSocket] with .socket const reference = stream.session === undefined ? stream : stream.session.socket; return (property in stream) || (property in reference); }, get(stream, property) { switch (property) { case 'on': case 'once': case 'end': case 'emit': case 'destroy': return stream[property].bind(stream); case 'writable': case 'destroyed': return stream[property]; case 'readable': if (stream.destroyed) { return false; } return stream.readable; case 'setTimeout': { const {session} = stream; if (session !== undefined) { return session.setTimeout.bind(session); } return stream.setTimeout.bind(stream); } case 'write': case 'read': case 'pause': case 'resume': throw new ERR_HTTP2_NO_SOCKET_MANIPULATION(); default: { // Replaced [kSocket] with .socket const reference = stream.session === undefined ? stream : stream.session.socket; const value = reference[property]; return typeof value === 'function' ? value.bind(reference) : value; } } }, getPrototypeOf(stream) { if (stream.session !== undefined) { // Replaced [kSocket] with .socket return Reflect.getPrototypeOf(stream.session.socket); } return Reflect.getPrototypeOf(stream); }, set(stream, property, value) { switch (property) { case 'writable': case 'readable': case 'destroyed': case 'on': case 'once': case 'end': case 'emit': case 'destroy': stream[property] = value; return true; case 'setTimeout': { const {session} = stream; if (session === undefined) { stream.setTimeout = value; } else { session.setTimeout = value; } return true; } case 'write': case 'read': case 'pause': case 'resume': throw new ERR_HTTP2_NO_SOCKET_MANIPULATION(); default: { // Replaced [kSocket] with .socket const reference = stream.session === undefined ? stream : stream.session.socket; reference[property] = value; return true; } } } }; module.exports = proxySocketHandler; /***/ }), /***/ 4592: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {ERR_INVALID_HTTP_TOKEN} = __nccwpck_require__(7087); const isRequestPseudoHeader = __nccwpck_require__(1199); const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/; module.exports = name => { if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) { throw new ERR_INVALID_HTTP_TOKEN('Header name', name); } }; /***/ }), /***/ 3549: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { ERR_HTTP_INVALID_HEADER_VALUE, ERR_INVALID_CHAR } = __nccwpck_require__(7087); const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/; module.exports = (name, value) => { if (typeof value === 'undefined') { throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name); } if (isInvalidHeaderValue.test(value)) { throw new ERR_INVALID_CHAR('header content', name); } }; /***/ }), /***/ 6435: /***/ ((module) => { /*! 
* is-extglob * * Copyright (c) 2014-2016, Jon Schlinkert. * Licensed under the MIT License. */ module.exports = function isExtglob(str) { if (typeof str !== 'string' || str === '') { return false; } var match; while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { if (match[2]) return true; str = str.slice(match.index + match[0].length); } return false; }; /***/ }), /***/ 4466: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! * is-glob * * Copyright (c) 2014-2017, Jon Schlinkert. * Released under the MIT License. */ var isExtglob = __nccwpck_require__(6435); var chars = { '{': '}', '(': ')', '[': ']'}; var strictCheck = function(str) { if (str[0] === '!') { return true; } var index = 0; var pipeIndex = -2; var closeSquareIndex = -2; var closeCurlyIndex = -2; var closeParenIndex = -2; var backSlashIndex = -2; while (index < str.length) { if (str[index] === '*') { return true; } if (str[index + 1] === '?' && /[\].+)]/.test(str[index])) { return true; } if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { if (closeSquareIndex < index) { closeSquareIndex = str.indexOf(']', index); } if (closeSquareIndex > index) { if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { return true; } backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { return true; } } } if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { closeCurlyIndex = str.indexOf('}', index); if (closeCurlyIndex > index) { backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { return true; } } } if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { closeParenIndex = str.indexOf(')', index); if (closeParenIndex > index) { backSlashIndex = str.indexOf('\\', index); if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { return true; } } } if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { if (pipeIndex < index) { pipeIndex = str.indexOf('|', index); } if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { closeParenIndex = str.indexOf(')', pipeIndex); if (closeParenIndex > pipeIndex) { backSlashIndex = str.indexOf('\\', pipeIndex); if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { return true; } } } } if (str[index] === '\\') { var open = str[index + 1]; index += 2; var close = chars[open]; if (close) { var n = str.indexOf(close, index); if (n !== -1) { index = n + 1; } } if (str[index] === '!') { return true; } } else { index++; } } return false; }; var relaxedCheck = function(str) { if (str[0] === '!') { return true; } var index = 0; while (index < str.length) { if (/[*?{}()[\]]/.test(str[index])) { return true; } if (str[index] === '\\') { var open = str[index + 1]; index += 2; var close = chars[open]; if (close) { var n = str.indexOf(close, index); if (n !== -1) { index = n + 1; } } if (str[index] === '!') { return true; } } else { index++; } } return false; }; module.exports = function isGlob(str, options) { if (typeof str !== 'string' || str === '') { return false; } if (isExtglob(str)) { return true; } var check = strictCheck; // optionally relax check if (options && options.strict === false) { check = relaxedCheck; } return check(str); }; /***/ }), /***/ 5680: /***/ ((module) => { "use strict"; /*! * is-number * * Copyright (c) 2014-present, Jon Schlinkert. * Released under the MIT License. 
*/ module.exports = function(num) { if (typeof num === 'number') { return num - num === 0; } if (typeof num === 'string' && num.trim() !== '') { return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); } return false; }; /***/ }), /***/ 1554: /***/ ((module) => { "use strict"; const isStream = stream => stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; isStream.writable = stream => isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; isStream.readable = stream => isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; isStream.duplex = stream => isStream.writable(stream) && isStream.readable(stream); isStream.transform = stream => isStream.duplex(stream) && typeof stream._transform === 'function'; module.exports = isStream; /***/ }), /***/ 2820: /***/ ((__unused_webpack_module, exports) => { //TODO: handle reviver/dehydrate function like normal //and handle indentation, like normal. //if anyone needs this... please send pull request. exports.stringify = function stringify (o) { if('undefined' == typeof o) return o if(o && Buffer.isBuffer(o)) return JSON.stringify(':base64:' + o.toString('base64')) if(o && o.toJSON) o = o.toJSON() if(o && 'object' === typeof o) { var s = '' var array = Array.isArray(o) s = array ? '[' : '{' var first = true for(var k in o) { var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k]) if(Object.hasOwnProperty.call(o, k) && !ignore) { if(!first) s += ',' first = false if (array) { if(o[k] == undefined) s += 'null' else s += stringify(o[k]) } else if (o[k] !== void(0)) { s += stringify(k) + ':' + stringify(o[k]) } } } s += array ? ']' : '}' return s } else if ('string' === typeof o) { return JSON.stringify(/^:/.test(o) ? ':' + o : o) } else if ('undefined' === typeof o) { return 'null'; } else return JSON.stringify(o) } exports.parse = function (s) { return JSON.parse(s, function (key, value) { if('string' === typeof value) { if(/^:base64:/.test(value)) return Buffer.from(value.substring(8), 'base64') else return /^:/.test(value) ? value.substring(1) : value } return value }) } /***/ }), /***/ 1531: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const EventEmitter = __nccwpck_require__(2361); const JSONB = __nccwpck_require__(2820); const loadStore = options => { const adapters = { redis: '@keyv/redis', rediss: '@keyv/redis', mongodb: '@keyv/mongo', mongo: '@keyv/mongo', sqlite: '@keyv/sqlite', postgresql: '@keyv/postgres', postgres: '@keyv/postgres', mysql: '@keyv/mysql', etcd: '@keyv/etcd', offline: '@keyv/offline', tiered: '@keyv/tiered', }; if (options.adapter || options.uri) { const adapter = options.adapter || /^[^:+]*/.exec(options.uri)[0]; return new (require(adapters[adapter]))(options); } return new Map(); }; const iterableAdapters = [ 'sqlite', 'postgres', 'mysql', 'mongo', 'redis', 'tiered', ]; class Keyv extends EventEmitter { constructor(uri, {emitErrors = true, ...options} = {}) { super(); this.opts = { namespace: 'keyv', serialize: JSONB.stringify, deserialize: JSONB.parse, ...((typeof uri === 'string') ? 
{uri} : uri), ...options, }; if (!this.opts.store) { const adapterOptions = {...this.opts}; this.opts.store = loadStore(adapterOptions); } if (this.opts.compression) { const compression = this.opts.compression; this.opts.serialize = compression.serialize.bind(compression); this.opts.deserialize = compression.deserialize.bind(compression); } if (typeof this.opts.store.on === 'function' && emitErrors) { this.opts.store.on('error', error => this.emit('error', error)); } this.opts.store.namespace = this.opts.namespace; const generateIterator = iterator => async function * () { for await (const [key, raw] of typeof iterator === 'function' ? iterator(this.opts.store.namespace) : iterator) { const data = await this.opts.deserialize(raw); if (this.opts.store.namespace && !key.includes(this.opts.store.namespace)) { continue; } if (typeof data.expires === 'number' && Date.now() > data.expires) { this.delete(key); continue; } yield [this._getKeyUnprefix(key), data.value]; } }; // Attach iterators if (typeof this.opts.store[Symbol.iterator] === 'function' && this.opts.store instanceof Map) { this.iterator = generateIterator(this.opts.store); } else if (typeof this.opts.store.iterator === 'function' && this.opts.store.opts && this._checkIterableAdaptar()) { this.iterator = generateIterator(this.opts.store.iterator.bind(this.opts.store)); } } _checkIterableAdaptar() { return iterableAdapters.includes(this.opts.store.opts.dialect) || iterableAdapters.findIndex(element => this.opts.store.opts.url.includes(element)) >= 0; } _getKeyPrefix(key) { return `${this.opts.namespace}:${key}`; } _getKeyPrefixArray(keys) { return keys.map(key => `${this.opts.namespace}:${key}`); } _getKeyUnprefix(key) { return key .split(':') .splice(1) .join(':'); } get(key, options) { const {store} = this.opts; const isArray = Array.isArray(key); const keyPrefixed = isArray ? this._getKeyPrefixArray(key) : this._getKeyPrefix(key); if (isArray && store.getMany === undefined) { const promises = []; for (const key of keyPrefixed) { promises.push(Promise.resolve() .then(() => store.get(key)) .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data)) .then(data => { if (data === undefined || data === null) { return undefined; } if (typeof data.expires === 'number' && Date.now() > data.expires) { return this.delete(key).then(() => undefined); } return (options && options.raw) ? data : data.value; }), ); } return Promise.allSettled(promises) .then(values => { const data = []; for (const value of values) { data.push(value.value); } return data; }); } return Promise.resolve() .then(() => isArray ? store.getMany(keyPrefixed) : store.get(keyPrefixed)) .then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data)) .then(data => { if (data === undefined || data === null) { return undefined; } if (isArray) { return data.map((row, index) => { if ((typeof row === 'string')) { row = this.opts.deserialize(row); } if (row === undefined || row === null) { return undefined; } if (typeof row.expires === 'number' && Date.now() > row.expires) { this.delete(key[index]).then(() => undefined); return undefined; } return (options && options.raw) ? row : row.value; }); } if (typeof data.expires === 'number' && Date.now() > data.expires) { return this.delete(key).then(() => undefined); } return (options && options.raw) ? 
data : data.value; }); } set(key, value, ttl) { const keyPrefixed = this._getKeyPrefix(key); if (typeof ttl === 'undefined') { ttl = this.opts.ttl; } if (ttl === 0) { ttl = undefined; } const {store} = this.opts; return Promise.resolve() .then(() => { const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null; if (typeof value === 'symbol') { this.emit('error', 'symbol cannot be serialized'); } value = {value, expires}; return this.opts.serialize(value); }) .then(value => store.set(keyPrefixed, value, ttl)) .then(() => true); } delete(key) { const {store} = this.opts; if (Array.isArray(key)) { const keyPrefixed = this._getKeyPrefixArray(key); if (store.deleteMany === undefined) { const promises = []; for (const key of keyPrefixed) { promises.push(store.delete(key)); } return Promise.allSettled(promises) .then(values => values.every(x => x.value === true)); } return Promise.resolve() .then(() => store.deleteMany(keyPrefixed)); } const keyPrefixed = this._getKeyPrefix(key); return Promise.resolve() .then(() => store.delete(keyPrefixed)); } clear() { const {store} = this.opts; return Promise.resolve() .then(() => store.clear()); } has(key) { const keyPrefixed = this._getKeyPrefix(key); const {store} = this.opts; return Promise.resolve() .then(async () => { if (typeof store.has === 'function') { return store.has(keyPrefixed); } const value = await store.get(keyPrefixed); return value !== undefined; }); } disconnect() { const {store} = this.opts; if (typeof store.disconnect === 'function') { return store.disconnect(); } } } module.exports = Keyv; /***/ }), /***/ 2578: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* * merge2 * https://github.com/teambition/merge2 * * Copyright (c) 2014-2020 Teambition * Licensed under the MIT license. 
*/ const Stream = __nccwpck_require__(2781) const PassThrough = Stream.PassThrough const slice = Array.prototype.slice module.exports = merge2 function merge2 () { const streamsQueue = [] const args = slice.call(arguments) let merging = false let options = args[args.length - 1] if (options && !Array.isArray(options) && options.pipe == null) { args.pop() } else { options = {} } const doEnd = options.end !== false const doPipeError = options.pipeError === true if (options.objectMode == null) { options.objectMode = true } if (options.highWaterMark == null) { options.highWaterMark = 64 * 1024 } const mergedStream = PassThrough(options) function addStream () { for (let i = 0, len = arguments.length; i < len; i++) { streamsQueue.push(pauseStreams(arguments[i], options)) } mergeStream() return this } function mergeStream () { if (merging) { return } merging = true let streams = streamsQueue.shift() if (!streams) { process.nextTick(endStream) return } if (!Array.isArray(streams)) { streams = [streams] } let pipesCount = streams.length + 1 function next () { if (--pipesCount > 0) { return } merging = false mergeStream() } function pipe (stream) { function onend () { stream.removeListener('merge2UnpipeEnd', onend) stream.removeListener('end', onend) if (doPipeError) { stream.removeListener('error', onerror) } next() } function onerror (err) { mergedStream.emit('error', err) } // skip ended stream if (stream._readableState.endEmitted) { return next() } stream.on('merge2UnpipeEnd', onend) stream.on('end', onend) if (doPipeError) { stream.on('error', onerror) } stream.pipe(mergedStream, { end: false }) // compatible for old stream stream.resume() } for (let i = 0; i < streams.length; i++) { pipe(streams[i]) } next() } function endStream () { merging = false // emit 'queueDrain' when all streams merged. mergedStream.emit('queueDrain') if (doEnd) { mergedStream.end() } } mergedStream.setMaxListeners(0) mergedStream.add = addStream mergedStream.on('unpipe', function (stream) { stream.emit('merge2UnpipeEnd') }) if (args.length) { addStream.apply(null, args) } return mergedStream } // check and pause streams for pipe. function pauseStreams (streams, options) { if (!Array.isArray(streams)) { // Backwards-compat with old-style streams if (!streams._readableState && streams.pipe) { streams = streams.pipe(PassThrough(options)) } if (!streams._readableState || !streams.pause || !streams.pipe) { throw new Error('Only readable stream can be merged.') } streams.pause() } else { for (let i = 0, len = streams.length; i < len; i++) { streams[i] = pauseStreams(streams[i], options) } } return streams } /***/ }), /***/ 6228: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const util = __nccwpck_require__(3837); const braces = __nccwpck_require__(610); const picomatch = __nccwpck_require__(2864); const utils = __nccwpck_require__(7426); const isEmptyString = val => val === '' || val === './'; /** * Returns an array of strings that match one or more glob patterns. * * ```js * const mm = require('micromatch'); * // mm(list, patterns[, options]); * * console.log(mm(['a.js', 'a.txt'], ['*.js'])); * //=> [ 'a.js' ] * ``` * @param {String|Array} `list` List of strings to match. * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
* @param {Object} `options` See available [options](#options) * @return {Array} Returns an array of matches * @summary false * @api public */ const micromatch = (list, patterns, options) => { patterns = [].concat(patterns); list = [].concat(list); let omit = new Set(); let keep = new Set(); let items = new Set(); let negatives = 0; let onResult = state => { items.add(state.output); if (options && options.onResult) { options.onResult(state); } }; for (let i = 0; i < patterns.length; i++) { let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); let negated = isMatch.state.negated || isMatch.state.negatedExtglob; if (negated) negatives++; for (let item of list) { let matched = isMatch(item, true); let match = negated ? !matched.isMatch : matched.isMatch; if (!match) continue; if (negated) { omit.add(matched.output); } else { omit.delete(matched.output); keep.add(matched.output); } } } let result = negatives === patterns.length ? [...items] : [...keep]; let matches = result.filter(item => !omit.has(item)); if (options && matches.length === 0) { if (options.failglob === true) { throw new Error(`No matches found for "${patterns.join(', ')}"`); } if (options.nonull === true || options.nullglob === true) { return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; } } return matches; }; /** * Backwards compatibility */ micromatch.match = micromatch; /** * Returns a matcher function from the given glob `pattern` and `options`. * The returned function takes a string to match as its only argument and returns * true if the string is a match. * * ```js * const mm = require('micromatch'); * // mm.matcher(pattern[, options]); * * const isMatch = mm.matcher('*.!(*a)'); * console.log(isMatch('a.a')); //=> false * console.log(isMatch('a.b')); //=> true * ``` * @param {String} `pattern` Glob pattern * @param {Object} `options` * @return {Function} Returns a matcher function. * @api public */ micromatch.matcher = (pattern, options) => picomatch(pattern, options); /** * Returns true if **any** of the given glob `patterns` match the specified `string`. * * ```js * const mm = require('micromatch'); * // mm.isMatch(string, patterns[, options]); * * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true * console.log(mm.isMatch('a.a', 'b.*')); //=> false * ``` * @param {String} `str` The string to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `[options]` See available [options](#options). * @return {Boolean} Returns true if any patterns match `str` * @api public */ micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); /** * Backwards compatibility */ micromatch.any = micromatch.isMatch; /** * Returns a list of strings that _**do not match any**_ of the given `patterns`. * * ```js * const mm = require('micromatch'); * // mm.not(list, patterns[, options]); * * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); * //=> ['b.b', 'c.c'] * ``` * @param {Array} `list` Array of strings to match. * @param {String|Array} `patterns` One or more glob pattern to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Array} Returns an array of strings that **do not match** the given patterns. 
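 * Implementation note: the positive match runs first with an `onResult` hook
 * that records every candidate, and the result is the set difference, so a
 * user-supplied `onResult` still fires for items that end up excluded.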
* @api public */ micromatch.not = (list, patterns, options = {}) => { patterns = [].concat(patterns).map(String); let result = new Set(); let items = []; let onResult = state => { if (options.onResult) options.onResult(state); items.push(state.output); }; let matches = new Set(micromatch(list, patterns, { ...options, onResult })); for (let item of items) { if (!matches.has(item)) { result.add(item); } } return [...result]; }; /** * Returns true if the given `string` contains the given pattern. Similar * to [.isMatch](#isMatch) but the pattern can match any part of the string. * * ```js * var mm = require('micromatch'); * // mm.contains(string, pattern[, options]); * * console.log(mm.contains('aa/bb/cc', '*b')); * //=> true * console.log(mm.contains('aa/bb/cc', '*d')); * //=> false * ``` * @param {String} `str` The string to match. * @param {String|Array} `patterns` Glob pattern to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if any of the patterns matches any part of `str`. * @api public */ micromatch.contains = (str, pattern, options) => { if (typeof str !== 'string') { throw new TypeError(`Expected a string: "${util.inspect(str)}"`); } if (Array.isArray(pattern)) { return pattern.some(p => micromatch.contains(str, p, options)); } if (typeof pattern === 'string') { if (isEmptyString(str) || isEmptyString(pattern)) { return false; } if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { return true; } } return micromatch.isMatch(str, pattern, { ...options, contains: true }); }; /** * Filter the keys of the given object with the given `glob` pattern * and `options`. Does not attempt to match nested keys. If you need this feature, * use [glob-object][] instead. * * ```js * const mm = require('micromatch'); * // mm.matchKeys(object, patterns[, options]); * * const obj = { aa: 'a', ab: 'b', ac: 'c' }; * console.log(mm.matchKeys(obj, '*b')); * //=> { ab: 'b' } * ``` * @param {Object} `object` The object with keys to filter. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Object} Returns an object with only keys that match the given patterns. * @api public */ micromatch.matchKeys = (obj, patterns, options) => { if (!utils.isObject(obj)) { throw new TypeError('Expected the first argument to be an object'); } let keys = micromatch(Object.keys(obj), patterns, options); let res = {}; for (let key of keys) res[key] = obj[key]; return res; }; /** * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. * * ```js * const mm = require('micromatch'); * // mm.some(list, patterns[, options]); * * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); * // true * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); * // false * ``` * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
* @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` * @api public */ micromatch.some = (list, patterns, options) => { let items = [].concat(list); for (let pattern of [].concat(patterns)) { let isMatch = picomatch(String(pattern), options); if (items.some(item => isMatch(item))) { return true; } } return false; }; /** * Returns true if every string in the given `list` matches * any of the given glob `patterns`. * * ```js * const mm = require('micromatch'); * // mm.every(list, patterns[, options]); * * console.log(mm.every('foo.js', ['foo.js'])); * // true * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); * // true * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); * // false * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); * // false * ``` * @param {String|Array} `list` The string or array of strings to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` * @api public */ micromatch.every = (list, patterns, options) => { let items = [].concat(list); for (let pattern of [].concat(patterns)) { let isMatch = picomatch(String(pattern), options); if (!items.every(item => isMatch(item))) { return false; } } return true; }; /** * Returns true if **all** of the given `patterns` match * the specified string. * * ```js * const mm = require('micromatch'); * // mm.all(string, patterns[, options]); * * console.log(mm.all('foo.js', ['foo.js'])); * // true * * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); * // false * * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); * // true * * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); * // true * ``` * @param {String|Array} `str` The string to test. * @param {String|Array} `patterns` One or more glob patterns to use for matching. * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Boolean} Returns true if any patterns match `str` * @api public */ micromatch.all = (str, patterns, options) => { if (typeof str !== 'string') { throw new TypeError(`Expected a string: "${util.inspect(str)}"`); } return [].concat(patterns).every(p => picomatch(p, options)(str)); }; /** * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. * * ```js * const mm = require('micromatch'); * // mm.capture(pattern, string[, options]); * * console.log(mm.capture('test/*.js', 'test/foo.js')); * //=> ['foo'] * console.log(mm.capture('test/*.js', 'foo/bar.css')); * //=> null * ``` * @param {String} `glob` Glob pattern to use for matching. * @param {String} `input` String to match * @param {Object} `options` See available [options](#options) for changing how matches are performed * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. * @api public */ micromatch.capture = (glob, input, options) => { let posix = utils.isWindows(options); let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); if (match) { return match.slice(1).map(v => v === void 0 ? '' : v); } }; /** * Create a regular expression from the given glob `pattern`. 
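 * (In this bundle, `makeRe` simply forwards to `picomatch.makeRe`, so
 * picomatch's options apply.)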
* * ```js * const mm = require('micromatch'); * // mm.makeRe(pattern[, options]); * * console.log(mm.makeRe('*.js')); * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ * ``` * @param {String} `pattern` A glob pattern to convert to regex. * @param {Object} `options` * @return {RegExp} Returns a regex created from the given pattern. * @api public */ micromatch.makeRe = (...args) => picomatch.makeRe(...args); /** * Scan a glob pattern to separate the pattern into segments. Used * by the [split](#split) method. * * ```js * const mm = require('micromatch'); * const state = mm.scan(pattern[, options]); * ``` * @param {String} `pattern` * @param {Object} `options` * @return {Object} Returns an object with * @api public */ micromatch.scan = (...args) => picomatch.scan(...args); /** * Parse a glob pattern to create the source string for a regular * expression. * * ```js * const mm = require('micromatch'); * const state = mm.parse(pattern[, options]); * ``` * @param {String} `glob` * @param {Object} `options` * @return {Object} Returns an object with useful properties and output to be used as regex source string. * @api public */ micromatch.parse = (patterns, options) => { let res = []; for (let pattern of [].concat(patterns || [])) { for (let str of braces(String(pattern), options)) { res.push(picomatch.parse(str, options)); } } return res; }; /** * Process the given brace `pattern`. * * ```js * const { braces } = require('micromatch'); * console.log(braces('foo/{a,b,c}/bar')); * //=> [ 'foo/(a|b|c)/bar' ] * * console.log(braces('foo/{a,b,c}/bar', { expand: true })); * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] * ``` * @param {String} `pattern` String with brace pattern to process. * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. 
* @return {Array} * @api public */ micromatch.braces = (pattern, options) => { if (typeof pattern !== 'string') throw new TypeError('Expected a string'); if ((options && options.nobrace === true) || !/\{.*\}/.test(pattern)) { return [pattern]; } return braces(pattern, options); }; /** * Expand braces */ micromatch.braceExpand = (pattern, options) => { if (typeof pattern !== 'string') throw new TypeError('Expected a string'); return micromatch.braces(pattern, { ...options, expand: true }); }; /** * Expose micromatch */ module.exports = micromatch; /***/ }), /***/ 2864: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = __nccwpck_require__(555); /***/ }), /***/ 6476: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(1017); const WIN_SLASH = '\\\\/'; const WIN_NO_SLASH = `[^${WIN_SLASH}]`; /** * Posix glob regex */ const DOT_LITERAL = '\\.'; const PLUS_LITERAL = '\\+'; const QMARK_LITERAL = '\\?'; const SLASH_LITERAL = '\\/'; const ONE_CHAR = '(?=.)'; const QMARK = '[^/]'; const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; const NO_DOT = `(?!${DOT_LITERAL})`; const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; const STAR = `${QMARK}*?`; const POSIX_CHARS = { DOT_LITERAL, PLUS_LITERAL, QMARK_LITERAL, SLASH_LITERAL, ONE_CHAR, QMARK, END_ANCHOR, DOTS_SLASH, NO_DOT, NO_DOTS, NO_DOT_SLASH, NO_DOTS_SLASH, QMARK_NO_DOT, STAR, START_ANCHOR }; /** * Windows glob regex */ const WINDOWS_CHARS = { ...POSIX_CHARS, SLASH_LITERAL: `[${WIN_SLASH}]`, QMARK: WIN_NO_SLASH, STAR: `${WIN_NO_SLASH}*?`, DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, NO_DOT: `(?!${DOT_LITERAL})`, NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, QMARK_NO_DOT: `[^.${WIN_SLASH}]`, START_ANCHOR: `(?:^|[${WIN_SLASH}])`, END_ANCHOR: `(?:[${WIN_SLASH}]|$)` }; /** * POSIX Bracket Regex */ const POSIX_REGEX_SOURCE = { alnum: 'a-zA-Z0-9', alpha: 'a-zA-Z', ascii: '\\x00-\\x7F', blank: ' \\t', cntrl: '\\x00-\\x1F\\x7F', digit: '0-9', graph: '\\x21-\\x7E', lower: 'a-z', print: '\\x20-\\x7E ', punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', space: ' \\t\\r\\n\\v\\f', upper: 'A-Z', word: 'A-Za-z0-9_', xdigit: 'A-Fa-f0-9' }; module.exports = { MAX_LENGTH: 1024 * 64, POSIX_REGEX_SOURCE, // regular expressions REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, // Replace globs with equivalent patterns to reduce parsing time. REPLACEMENTS: { '***': '*', '**/**': '**', '**/**/**': '**' }, // Digits CHAR_0: 48, /* 0 */ CHAR_9: 57, /* 9 */ // Alphabet chars. CHAR_UPPERCASE_A: 65, /* A */ CHAR_LOWERCASE_A: 97, /* a */ CHAR_UPPERCASE_Z: 90, /* Z */ CHAR_LOWERCASE_Z: 122, /* z */ CHAR_LEFT_PARENTHESES: 40, /* ( */ CHAR_RIGHT_PARENTHESES: 41, /* ) */ CHAR_ASTERISK: 42, /* * */ // Non-alphabetic chars. 
CHAR_AMPERSAND: 38, /* & */ CHAR_AT: 64, /* @ */ CHAR_BACKWARD_SLASH: 92, /* \ */ CHAR_CARRIAGE_RETURN: 13, /* \r */ CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ CHAR_COLON: 58, /* : */ CHAR_COMMA: 44, /* , */ CHAR_DOT: 46, /* . */ CHAR_DOUBLE_QUOTE: 34, /* " */ CHAR_EQUAL: 61, /* = */ CHAR_EXCLAMATION_MARK: 33, /* ! */ CHAR_FORM_FEED: 12, /* \f */ CHAR_FORWARD_SLASH: 47, /* / */ CHAR_GRAVE_ACCENT: 96, /* ` */ CHAR_HASH: 35, /* # */ CHAR_HYPHEN_MINUS: 45, /* - */ CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ CHAR_LEFT_CURLY_BRACE: 123, /* { */ CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ CHAR_LINE_FEED: 10, /* \n */ CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ CHAR_PERCENT: 37, /* % */ CHAR_PLUS: 43, /* + */ CHAR_QUESTION_MARK: 63, /* ? */ CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ CHAR_RIGHT_CURLY_BRACE: 125, /* } */ CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ CHAR_SEMICOLON: 59, /* ; */ CHAR_SINGLE_QUOTE: 39, /* ' */ CHAR_SPACE: 32, /* */ CHAR_TAB: 9, /* \t */ CHAR_UNDERSCORE: 95, /* _ */ CHAR_VERTICAL_LINE: 124, /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ SEP: path.sep, /** * Create EXTGLOB_CHARS */ extglobChars(chars) { return { '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, '?': { type: 'qmark', open: '(?:', close: ')?' }, '+': { type: 'plus', open: '(?:', close: ')+' }, '*': { type: 'star', open: '(?:', close: ')*' }, '@': { type: 'at', open: '(?:', close: ')' } }; }, /** * Create GLOB_CHARS */ globChars(win32) { return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; } }; /***/ }), /***/ 5961: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const constants = __nccwpck_require__(6476); const utils = __nccwpck_require__(7426); /** * Constants */ const { MAX_LENGTH, POSIX_REGEX_SOURCE, REGEX_NON_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_BACKREF, REPLACEMENTS } = constants; /** * Helpers */ const expandRange = (args, options) => { if (typeof options.expandRange === 'function') { return options.expandRange(...args, options); } args.sort(); const value = `[${args.join('-')}]`; try { /* eslint-disable-next-line no-new */ new RegExp(value); } catch (ex) { return args.map(v => utils.escapeRegex(v)).join('..'); } return value; }; /** * Create the message for a syntax error */ const syntaxError = (type, char) => { return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; }; /** * Parse the given input string. * @param {String} input * @param {Object} options * @return {Object} */ const parse = (input, options) => { if (typeof input !== 'string') { throw new TypeError('Expected a string'); } input = REPLACEMENTS[input] || input; const opts = { ...options }; const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; let len = input.length; if (len > max) { throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); } const bos = { type: 'bos', value: '', output: opts.prepend || '' }; const tokens = [bos]; const capture = opts.capture ? '' : '?:'; const win32 = utils.isWindows(options); // create constants based on platform, for windows or posix const PLATFORM_CHARS = constants.globChars(win32); const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); const { DOT_LITERAL, PLUS_LITERAL, SLASH_LITERAL, ONE_CHAR, DOTS_SLASH, NO_DOT, NO_DOT_SLASH, NO_DOTS_SLASH, QMARK, QMARK_NO_DOT, STAR, START_ANCHOR } = PLATFORM_CHARS; const globstar = opts => { return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; const nodot = opts.dot ? 
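// `nodot` is the guard prepended before star output: empty when `dot: true`,
// otherwise NO_DOT ("(?!\.)") so wildcards cannot match a leading dot in a segment.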
'' : NO_DOT; const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; let star = opts.bash === true ? globstar(opts) : STAR; if (opts.capture) { star = `(${star})`; } // minimatch options support if (typeof opts.noext === 'boolean') { opts.noextglob = opts.noext; } const state = { input, index: -1, start: 0, dot: opts.dot === true, consumed: '', output: '', prefix: '', backtrack: false, negated: false, brackets: 0, braces: 0, parens: 0, quotes: 0, globstar: false, tokens }; input = utils.removePrefix(input, state); len = input.length; const extglobs = []; const braces = []; const stack = []; let prev = bos; let value; /** * Tokenizing helpers */ const eos = () => state.index === len - 1; const peek = state.peek = (n = 1) => input[state.index + n]; const advance = state.advance = () => input[++state.index] || ''; const remaining = () => input.slice(state.index + 1); const consume = (value = '', num = 0) => { state.consumed += value; state.index += num; }; const append = token => { state.output += token.output != null ? token.output : token.value; consume(token.value); }; const negate = () => { let count = 1; while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { advance(); state.start++; count++; } if (count % 2 === 0) { return false; } state.negated = true; state.start++; return true; }; const increment = type => { state[type]++; stack.push(type); }; const decrement = type => { state[type]--; stack.pop(); }; /** * Push tokens onto the tokens array. This helper speeds up * tokenizing by 1) helping us avoid backtracking as much as possible, * and 2) helping us avoid creating extra tokens when consecutive * characters are plain text. This improves performance and simplifies * lookbehinds. */ const push = tok => { if (prev.type === 'globstar') { const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { state.output = state.output.slice(0, -prev.output.length); prev.type = 'star'; prev.value = '*'; prev.output = star; state.output += prev.output; } } if (extglobs.length && tok.type !== 'paren') { extglobs[extglobs.length - 1].inner += tok.value; } if (tok.value || tok.output) append(tok); if (prev && prev.type === 'text' && tok.type === 'text') { prev.value += tok.value; prev.output = (prev.output || '') + tok.value; return; } tok.prev = prev; tokens.push(tok); prev = tok; }; const extglobOpen = (type, value) => { const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; token.prev = prev; token.parens = state.parens; token.output = state.output; const output = (opts.capture ? '(' : '') + token.open; increment('parens'); push({ type, value, output: state.output ? '' : ONE_CHAR }); push({ type: 'paren', extglob: true, value: advance(), output }); extglobs.push(token); }; const extglobClose = token => { let output = token.close + (opts.capture ? ')' : ''); let rest; if (token.type === 'negate') { let extglobStar = star; if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { extglobStar = globstar(opts); } if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { output = token.close = `)$))${extglobStar}`; } if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. 
// In this case, we need to parse the string and use it in the output of the original pattern. // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. // // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. const expression = parse(rest, { ...options, fastpaths: false }).output; output = token.close = `)${expression})${extglobStar})`; } if (token.prev.type === 'bos') { state.negatedExtglob = true; } } push({ type: 'paren', extglob: true, value, output }); decrement('parens'); }; /** * Fast paths */ if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { let backslashes = false; let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { if (first === '\\') { backslashes = true; return m; } if (first === '?') { if (esc) { return esc + first + (rest ? QMARK.repeat(rest.length) : ''); } if (index === 0) { return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); } return QMARK.repeat(chars.length); } if (first === '.') { return DOT_LITERAL.repeat(chars.length); } if (first === '*') { if (esc) { return esc + first + (rest ? star : ''); } return star; } return esc ? m : `\\${m}`; }); if (backslashes === true) { if (opts.unescape === true) { output = output.replace(/\\/g, ''); } else { output = output.replace(/\\+/g, m => { return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : ''); }); } } if (output === input && opts.contains === true) { state.output = input; return state; } state.output = utils.wrapOutput(output, state, options); return state; } /** * Tokenize input until we reach end-of-string */ while (!eos()) { value = advance(); if (value === '\u0000') { continue; } /** * Escaped characters */ if (value === '\\') { const next = peek(); if (next === '/' && opts.bash !== true) { continue; } if (next === '.' || next === ';') { continue; } if (!next) { value += '\\'; push({ type: 'text', value }); continue; } // collapse slashes to reduce potential for exploits const match = /^\\+/.exec(remaining()); let slashes = 0; if (match && match[0].length > 2) { slashes = match[0].length; state.index += slashes; if (slashes % 2 !== 0) { value += '\\'; } } if (opts.unescape === true) { value = advance(); } else { value += advance(); } if (state.brackets === 0) { push({ type: 'text', value }); continue; } } /** * If we're inside a regex character class, continue * until we reach the closing bracket. */ if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { if (opts.posix !== false && value === ':') { const inner = prev.value.slice(1); if (inner.includes('[')) { prev.posix = true; if (inner.includes(':')) { const idx = prev.value.lastIndexOf('['); const pre = prev.value.slice(0, idx); const rest = prev.value.slice(idx + 2); const posix = POSIX_REGEX_SOURCE[rest]; if (posix) { prev.value = pre + posix; state.backtrack = true; advance(); if (!bos.output && tokens.indexOf(prev) === 1) { bos.output = ONE_CHAR; } continue; } } } } if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { value = `\\${value}`; } if (value === ']' && (prev.value === '[' || prev.value === '[^')) { value = `\\${value}`; } if (opts.posix === true && value === '!' && prev.value === '[') { value = '^'; } prev.value += value; append({ value }); continue; } /** * If we're inside a quoted string, continue * until we reach the closing double quote. 
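 * Each character in between is escaped with utils.escapeRegex, so quoted text
 * is matched literally rather than parsed as glob syntax.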
*/ if (state.quotes === 1 && value !== '"') { value = utils.escapeRegex(value); prev.value += value; append({ value }); continue; } /** * Double quotes */ if (value === '"') { state.quotes = state.quotes === 1 ? 0 : 1; if (opts.keepQuotes === true) { push({ type: 'text', value }); } continue; } /** * Parentheses */ if (value === '(') { increment('parens'); push({ type: 'paren', value }); continue; } if (value === ')') { if (state.parens === 0 && opts.strictBrackets === true) { throw new SyntaxError(syntaxError('opening', '(')); } const extglob = extglobs[extglobs.length - 1]; if (extglob && state.parens === extglob.parens + 1) { extglobClose(extglobs.pop()); continue; } push({ type: 'paren', value, output: state.parens ? ')' : '\\)' }); decrement('parens'); continue; } /** * Square brackets */ if (value === '[') { if (opts.nobracket === true || !remaining().includes(']')) { if (opts.nobracket !== true && opts.strictBrackets === true) { throw new SyntaxError(syntaxError('closing', ']')); } value = `\\${value}`; } else { increment('brackets'); } push({ type: 'bracket', value }); continue; } if (value === ']') { if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { push({ type: 'text', value, output: `\\${value}` }); continue; } if (state.brackets === 0) { if (opts.strictBrackets === true) { throw new SyntaxError(syntaxError('opening', '[')); } push({ type: 'text', value, output: `\\${value}` }); continue; } decrement('brackets'); const prevValue = prev.value.slice(1); if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { value = `/${value}`; } prev.value += value; append({ value }); // when literal brackets are explicitly disabled // assume we should match with a regex character class if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { continue; } const escaped = utils.escapeRegex(prev.value); state.output = state.output.slice(0, -prev.value.length); // when literal brackets are explicitly enabled // assume we should escape the brackets to match literal characters if (opts.literalBrackets === true) { state.output += escaped; prev.value = escaped; continue; } // when the user specifies nothing, try to match both prev.value = `(${capture}${escaped}|${prev.value})`; state.output += prev.value; continue; } /** * Braces */ if (value === '{' && opts.nobrace !== true) { increment('braces'); const open = { type: 'brace', value, output: '(', outputIndex: state.output.length, tokensIndex: state.tokens.length }; braces.push(open); push(open); continue; } if (value === '}') { const brace = braces[braces.length - 1]; if (opts.nobrace === true || !brace) { push({ type: 'text', value, output: value }); continue; } let output = ')'; if (brace.dots === true) { const arr = tokens.slice(); const range = []; for (let i = arr.length - 1; i >= 0; i--) { tokens.pop(); if (arr[i].type === 'brace') { break; } if (arr[i].type !== 'dots') { range.unshift(arr[i].value); } } output = expandRange(range, opts); state.backtrack = true; } if (brace.comma !== true && brace.dots !== true) { const out = state.output.slice(0, brace.outputIndex); const toks = state.tokens.slice(brace.tokensIndex); brace.value = brace.output = '\\{'; value = output = '\\}'; state.output = out; for (const t of toks) { state.output += (t.output || t.value); } } push({ type: 'brace', value, output }); decrement('braces'); braces.pop(); continue; } /** * Pipes */ if (value === '|') { if (extglobs.length > 0) { extglobs[extglobs.length - 1].conditions++; } push({ 
type: 'text', value }); continue; } /** * Commas */ if (value === ',') { let output = value; const brace = braces[braces.length - 1]; if (brace && stack[stack.length - 1] === 'braces') { brace.comma = true; output = '|'; } push({ type: 'comma', value, output }); continue; } /** * Slashes */ if (value === '/') { // if the beginning of the glob is "./", advance the start // to the current index, and don't add the "./" characters // to the state. This greatly simplifies lookbehinds when // checking for BOS characters like "!" and "." (not "./") if (prev.type === 'dot' && state.index === state.start + 1) { state.start = state.index + 1; state.consumed = ''; state.output = ''; tokens.pop(); prev = bos; // reset "prev" to the first token continue; } push({ type: 'slash', value, output: SLASH_LITERAL }); continue; } /** * Dots */ if (value === '.') { if (state.braces > 0 && prev.type === 'dot') { if (prev.value === '.') prev.output = DOT_LITERAL; const brace = braces[braces.length - 1]; prev.type = 'dots'; prev.output += value; prev.value += value; brace.dots = true; continue; } if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { push({ type: 'text', value, output: DOT_LITERAL }); continue; } push({ type: 'dot', value, output: DOT_LITERAL }); continue; } /** * Question marks */ if (value === '?') { const isGroup = prev && prev.value === '('; if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { extglobOpen('qmark', value); continue; } if (prev && prev.type === 'paren') { const next = peek(); let output = value; if (next === '<' && !utils.supportsLookbehinds()) { throw new Error('Node.js v10 or higher is required for regex lookbehinds'); } if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { output = `\\${value}`; } push({ type: 'text', value, output }); continue; } if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { push({ type: 'qmark', value, output: QMARK_NO_DOT }); continue; } push({ type: 'qmark', value, output: QMARK }); continue; } /** * Exclamation */ if (value === '!') { if (opts.noextglob !== true && peek() === '(') { if (peek(2) !== '?' 
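// Only open a negation extglob when "!(" is not immediately followed by a
// regex-style group such as "(?:", "(?=", "(?!" or "(?<".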
|| !/[!=<:]/.test(peek(3))) { extglobOpen('negate', value); continue; } } if (opts.nonegate !== true && state.index === 0) { negate(); continue; } } /** * Plus */ if (value === '+') { if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { extglobOpen('plus', value); continue; } if ((prev && prev.value === '(') || opts.regex === false) { push({ type: 'plus', value, output: PLUS_LITERAL }); continue; } if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { push({ type: 'plus', value }); continue; } push({ type: 'plus', value: PLUS_LITERAL }); continue; } /** * Plain text */ if (value === '@') { if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { push({ type: 'at', extglob: true, value, output: '' }); continue; } push({ type: 'text', value }); continue; } /** * Plain text */ if (value !== '*') { if (value === '$' || value === '^') { value = `\\${value}`; } const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); if (match) { value += match[0]; state.index += match[0].length; } push({ type: 'text', value }); continue; } /** * Stars */ if (prev && (prev.type === 'globstar' || prev.star === true)) { prev.type = 'star'; prev.star = true; prev.value += value; prev.output = star; state.backtrack = true; state.globstar = true; consume(value); continue; } let rest = remaining(); if (opts.noextglob !== true && /^\([^?]/.test(rest)) { extglobOpen('star', value); continue; } if (prev.type === 'star') { if (opts.noglobstar === true) { consume(value); continue; } const prior = prev.prev; const before = prior.prev; const isStart = prior.type === 'slash' || prior.type === 'bos'; const afterStar = before && (before.type === 'star' || before.type === 'globstar'); if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { push({ type: 'star', value, output: '' }); continue; } const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { push({ type: 'star', value, output: '' }); continue; } // strip consecutive `/**/` while (rest.slice(0, 3) === '/**') { const after = input[state.index + 4]; if (after && after !== '/') { break; } rest = rest.slice(3); consume('/**', 3); } if (prior.type === 'bos' && eos()) { prev.type = 'globstar'; prev.value += value; prev.output = globstar(opts); state.output = prev.output; state.globstar = true; consume(value); continue; } if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { state.output = state.output.slice(0, -(prior.output + prev.output).length); prior.output = `(?:${prior.output}`; prev.type = 'globstar'; prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); prev.value += value; state.globstar = true; state.output += prior.output + prev.output; consume(value); continue; } if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { const end = rest[1] !== void 0 ? 
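// `end` adds an end-of-string alternative ("|$") to the group built below when
// more of the pattern follows the "**/", and leaves it out otherwise.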
'|$' : ''; state.output = state.output.slice(0, -(prior.output + prev.output).length); prior.output = `(?:${prior.output}`; prev.type = 'globstar'; prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; prev.value += value; state.output += prior.output + prev.output; state.globstar = true; consume(value + advance()); push({ type: 'slash', value: '/', output: '' }); continue; } if (prior.type === 'bos' && rest[0] === '/') { prev.type = 'globstar'; prev.value += value; prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; state.output = prev.output; state.globstar = true; consume(value + advance()); push({ type: 'slash', value: '/', output: '' }); continue; } // remove single star from output state.output = state.output.slice(0, -prev.output.length); // reset previous token to globstar prev.type = 'globstar'; prev.output = globstar(opts); prev.value += value; // reset output with globstar state.output += prev.output; state.globstar = true; consume(value); continue; } const token = { type: 'star', value, output: star }; if (opts.bash === true) { token.output = '.*?'; if (prev.type === 'bos' || prev.type === 'slash') { token.output = nodot + token.output; } push(token); continue; } if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { token.output = value; push(token); continue; } if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { if (prev.type === 'dot') { state.output += NO_DOT_SLASH; prev.output += NO_DOT_SLASH; } else if (opts.dot === true) { state.output += NO_DOTS_SLASH; prev.output += NO_DOTS_SLASH; } else { state.output += nodot; prev.output += nodot; } if (peek() !== '*') { state.output += ONE_CHAR; prev.output += ONE_CHAR; } } push(token); } while (state.brackets > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); state.output = utils.escapeLast(state.output, '['); decrement('brackets'); } while (state.parens > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); state.output = utils.escapeLast(state.output, '('); decrement('parens'); } while (state.braces > 0) { if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); state.output = utils.escapeLast(state.output, '{'); decrement('braces'); } if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); } // rebuild the output if we had to backtrack at any point if (state.backtrack === true) { state.output = ''; for (const token of state.tokens) { state.output += token.output != null ? token.output : token.value; if (token.suffix) { state.output += token.suffix; } } } return state; }; /** * Fast paths for creating regular expressions for common glob patterns. * This can significantly speed up processing and has very little downside * impact when none of the fast paths match. */ parse.fastpaths = (input, options) => { const opts = { ...options }; const max = typeof opts.maxLength === 'number' ? 
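// As in parse() above, a user-supplied `maxLength` can only tighten the limit;
// it is always capped at MAX_LENGTH (65,536 characters).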
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; const len = input.length; if (len > max) { throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); } input = REPLACEMENTS[input] || input; const win32 = utils.isWindows(options); // create constants based on platform, for windows or posix const { DOT_LITERAL, SLASH_LITERAL, ONE_CHAR, DOTS_SLASH, NO_DOT, NO_DOTS, NO_DOTS_SLASH, STAR, START_ANCHOR } = constants.globChars(win32); const nodot = opts.dot ? NO_DOTS : NO_DOT; const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; const capture = opts.capture ? '' : '?:'; const state = { negated: false, prefix: '' }; let star = opts.bash === true ? '.*?' : STAR; if (opts.capture) { star = `(${star})`; } const globstar = opts => { if (opts.noglobstar === true) return star; return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; }; const create = str => { switch (str) { case '*': return `${nodot}${ONE_CHAR}${star}`; case '.*': return `${DOT_LITERAL}${ONE_CHAR}${star}`; case '*.*': return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; case '*/*': return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; case '**': return nodot + globstar(opts); case '**/*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; case '**/*.*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; case '**/.*': return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; default: { const match = /^(.*?)\.(\w+)$/.exec(str); if (!match) return; const source = create(match[1]); if (!source) return; return source + DOT_LITERAL + match[2]; } } }; const output = utils.removePrefix(input, state); let source = create(output); if (source && opts.strictSlashes !== true) { source += `${SLASH_LITERAL}?`; } return source; }; module.exports = parse; /***/ }), /***/ 555: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(1017); const scan = __nccwpck_require__(7751); const parse = __nccwpck_require__(5961); const utils = __nccwpck_require__(7426); const constants = __nccwpck_require__(6476); const isObject = val => val && typeof val === 'object' && !Array.isArray(val); /** * Creates a matcher function from one or more glob patterns. The * returned function takes a string to match as its first argument, * and returns true if the string is a match. The returned matcher * function also takes a boolean as the second argument that, when true, * returns an object with additional information. * * ```js * const picomatch = require('picomatch'); * // picomatch(glob[, options]); * * const isMatch = picomatch('*.!(*a)'); * console.log(isMatch('a.a')); //=> false * console.log(isMatch('a.b')); //=> true * ``` * @name picomatch * @param {String|Array} `globs` One or more glob patterns. * @param {Object=} `options` * @return {Function=} Returns a matcher function. 
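 * Note: a third, internal argument (`returnState`) attaches the parser state
 * to the returned matcher as `matcher.state`; micromatch passes `true` for it
 * to read `state.negated`. As a quick illustration (based on the code below,
 * not the published docs), passing `true` as the matcher's second argument
 * returns the full result object instead of a boolean:
 *
 * ```js
 * const isMatch = picomatch('*.js');
 * console.log(isMatch('a.js', true).isMatch); //=> true
 * console.log(isMatch('a.js', true).output); //=> 'a.js'
 * ```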
* @api public */ const picomatch = (glob, options, returnState = false) => { if (Array.isArray(glob)) { const fns = glob.map(input => picomatch(input, options, returnState)); const arrayMatcher = str => { for (const isMatch of fns) { const state = isMatch(str); if (state) return state; } return false; }; return arrayMatcher; } const isState = isObject(glob) && glob.tokens && glob.input; if (glob === '' || (typeof glob !== 'string' && !isState)) { throw new TypeError('Expected pattern to be a non-empty string'); } const opts = options || {}; const posix = utils.isWindows(options); const regex = isState ? picomatch.compileRe(glob, options) : picomatch.makeRe(glob, options, false, true); const state = regex.state; delete regex.state; let isIgnored = () => false; if (opts.ignore) { const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); } const matcher = (input, returnObject = false) => { const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); const result = { glob, state, regex, posix, input, output, match, isMatch }; if (typeof opts.onResult === 'function') { opts.onResult(result); } if (isMatch === false) { result.isMatch = false; return returnObject ? result : false; } if (isIgnored(input)) { if (typeof opts.onIgnore === 'function') { opts.onIgnore(result); } result.isMatch = false; return returnObject ? result : false; } if (typeof opts.onMatch === 'function') { opts.onMatch(result); } return returnObject ? result : true; }; if (returnState) { matcher.state = state; } return matcher; }; /** * Test `input` with the given `regex`. This is used by the main * `picomatch()` function to test the input string. * * ```js * const picomatch = require('picomatch'); * // picomatch.test(input, regex[, options]); * * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } * ``` * @param {String} `input` String to test. * @param {RegExp} `regex` * @return {Object} Returns an object with matching info. * @api public */ picomatch.test = (input, regex, options, { glob, posix } = {}) => { if (typeof input !== 'string') { throw new TypeError('Expected input to be a string'); } if (input === '') { return { isMatch: false, output: '' }; } const opts = options || {}; const format = opts.format || (posix ? utils.toPosixSlashes : null); let match = input === glob; let output = (match && format) ? format(input) : input; if (match === false) { output = format ? format(input) : input; match = output === glob; } if (match === false || opts.capture === true) { if (opts.matchBase === true || opts.basename === true) { match = picomatch.matchBase(input, regex, options, posix); } else { match = regex.exec(output); } } return { isMatch: Boolean(match), match, output }; }; /** * Match the basename of a filepath. * * ```js * const picomatch = require('picomatch'); * // picomatch.matchBase(input, glob[, options]); * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true * ``` * @param {String} `input` String to test. * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). * @return {Boolean} * @api public */ picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { const regex = glob instanceof RegExp ? 
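// `glob` may already be a compiled RegExp; either way only the basename of the
// input path is tested, which is what the matchBase/basename options rely on.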
glob : picomatch.makeRe(glob, options); return regex.test(path.basename(input)); }; /** * Returns true if **any** of the given glob `patterns` match the specified `string`. * * ```js * const picomatch = require('picomatch'); * // picomatch.isMatch(string, patterns[, options]); * * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false * ``` * @param {String|Array} str The string to test. * @param {String|Array} patterns One or more glob patterns to use for matching. * @param {Object} [options] See available [options](#options). * @return {Boolean} Returns true if any patterns match `str` * @api public */ picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); /** * Parse a glob pattern to create the source string for a regular * expression. * * ```js * const picomatch = require('picomatch'); * const result = picomatch.parse(pattern[, options]); * ``` * @param {String} `pattern` * @param {Object} `options` * @return {Object} Returns an object with useful properties and output to be used as a regex source string. * @api public */ picomatch.parse = (pattern, options) => { if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); return parse(pattern, { ...options, fastpaths: false }); }; /** * Scan a glob pattern to separate the pattern into segments. * * ```js * const picomatch = require('picomatch'); * // picomatch.scan(input[, options]); * * const result = picomatch.scan('!./foo/*.js'); * console.log(result); * { prefix: '!./', * input: '!./foo/*.js', * start: 3, * base: 'foo', * glob: '*.js', * isBrace: false, * isBracket: false, * isGlob: true, * isExtglob: false, * isGlobstar: false, * negated: true } * ``` * @param {String} `input` Glob pattern to scan. * @param {Object} `options` * @return {Object} Returns an object with * @api public */ picomatch.scan = (input, options) => scan(input, options); /** * Compile a regular expression from the `state` object returned by the * [parse()](#parse) method. * * @param {Object} `state` * @param {Object} `options` * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. * @return {RegExp} * @api public */ picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { if (returnOutput === true) { return state.output; } const opts = options || {}; const prepend = opts.contains ? '' : '^'; const append = opts.contains ? '' : '$'; let source = `${prepend}(?:${state.output})${append}`; if (state && state.negated === true) { source = `^(?!${source}).*$`; } const regex = picomatch.toRegex(source, options); if (returnState === true) { regex.state = state; } return regex; }; /** * Create a regular expression from a parsed glob pattern. * * ```js * const picomatch = require('picomatch'); * const state = picomatch.parse('*.js'); * // picomatch.compileRe(state[, options]); * * console.log(picomatch.compileRe(state)); * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ * ``` * @param {String} `state` The object returned from the `.parse` method. * @param {Object} `options` * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. 
* @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. * @return {RegExp} Returns a regex created from the given pattern. * @api public */ picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { if (!input || typeof input !== 'string') { throw new TypeError('Expected a non-empty string'); } let parsed = { negated: false, fastpaths: true }; if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { parsed.output = parse.fastpaths(input, options); } if (!parsed.output) { parsed = parse(input, options); } return picomatch.compileRe(parsed, options, returnOutput, returnState); }; /** * Create a regular expression from the given regex source string. * * ```js * const picomatch = require('picomatch'); * // picomatch.toRegex(source[, options]); * * const { output } = picomatch.parse('*.js'); * console.log(picomatch.toRegex(output)); * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ * ``` * @param {String} `source` Regular expression source string. * @param {Object} `options` * @return {RegExp} * @api public */ picomatch.toRegex = (source, options) => { try { const opts = options || {}; return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); } catch (err) { if (options && options.debug === true) throw err; return /$^/; } }; /** * Picomatch constants. * @return {Object} */ picomatch.constants = constants; /** * Expose "picomatch" */ module.exports = picomatch; /***/ }), /***/ 7751: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const utils = __nccwpck_require__(7426); const { CHAR_ASTERISK, /* * */ CHAR_AT, /* @ */ CHAR_BACKWARD_SLASH, /* \ */ CHAR_COMMA, /* , */ CHAR_DOT, /* . */ CHAR_EXCLAMATION_MARK, /* ! */ CHAR_FORWARD_SLASH, /* / */ CHAR_LEFT_CURLY_BRACE, /* { */ CHAR_LEFT_PARENTHESES, /* ( */ CHAR_LEFT_SQUARE_BRACKET, /* [ */ CHAR_PLUS, /* + */ CHAR_QUESTION_MARK, /* ? */ CHAR_RIGHT_CURLY_BRACE, /* } */ CHAR_RIGHT_PARENTHESES, /* ) */ CHAR_RIGHT_SQUARE_BRACKET /* ] */ } = __nccwpck_require__(6476); const isPathSeparator = code => { return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; }; const depth = token => { if (token.isPrefix !== true) { token.depth = token.isGlobstar ? Infinity : 1; } }; /** * Quickly scans a glob pattern and returns an object with a handful of * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). * * ```js * const pm = require('picomatch'); * console.log(pm.scan('foo/bar/*.js')); * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } * ``` * @param {String} `str` * @param {Object} `options` * @return {Object} Returns an object with tokens and regex source string. 
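 * A quick illustration (inferred from the implementation, not upstream docs):
 * with `{ parts: true }` the scan continues past the first glob character and
 * the individual path segments are returned as well:
 *
 * ```js
 * console.log(picomatch.scan('foo/bar/*.js', { parts: true }).parts);
 * //=> ['foo', 'bar', '*.js']
 * ```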
* @api public */ const scan = (input, options) => { const opts = options || {}; const length = input.length - 1; const scanToEnd = opts.parts === true || opts.scanToEnd === true; const slashes = []; const tokens = []; const parts = []; let str = input; let index = -1; let start = 0; let lastIndex = 0; let isBrace = false; let isBracket = false; let isGlob = false; let isExtglob = false; let isGlobstar = false; let braceEscaped = false; let backslashes = false; let negated = false; let negatedExtglob = false; let finished = false; let braces = 0; let prev; let code; let token = { value: '', depth: 0, isGlob: false }; const eos = () => index >= length; const peek = () => str.charCodeAt(index + 1); const advance = () => { prev = code; return str.charCodeAt(++index); }; while (index < length) { code = advance(); let next; if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; code = advance(); if (code === CHAR_LEFT_CURLY_BRACE) { braceEscaped = true; } continue; } if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { braces++; while (eos() !== true && (code = advance())) { if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; advance(); continue; } if (code === CHAR_LEFT_CURLY_BRACE) { braces++; continue; } if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { isBrace = token.isBrace = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (braceEscaped !== true && code === CHAR_COMMA) { isBrace = token.isBrace = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_RIGHT_CURLY_BRACE) { braces--; if (braces === 0) { braceEscaped = false; isBrace = token.isBrace = true; finished = true; break; } } } if (scanToEnd === true) { continue; } break; } if (code === CHAR_FORWARD_SLASH) { slashes.push(index); tokens.push(token); token = { value: '', depth: 0, isGlob: false }; if (finished === true) continue; if (prev === CHAR_DOT && index === (start + 1)) { start += 2; continue; } lastIndex = index + 1; continue; } if (opts.noext !== true) { const isExtglobChar = code === CHAR_PLUS || code === CHAR_AT || code === CHAR_ASTERISK || code === CHAR_QUESTION_MARK || code === CHAR_EXCLAMATION_MARK; if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { isGlob = token.isGlob = true; isExtglob = token.isExtglob = true; finished = true; if (code === CHAR_EXCLAMATION_MARK && index === start) { negatedExtglob = true; } if (scanToEnd === true) { while (eos() !== true && (code = advance())) { if (code === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; code = advance(); continue; } if (code === CHAR_RIGHT_PARENTHESES) { isGlob = token.isGlob = true; finished = true; break; } } continue; } break; } } if (code === CHAR_ASTERISK) { if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_QUESTION_MARK) { isGlob = token.isGlob = true; finished = true; if (scanToEnd === true) { continue; } break; } if (code === CHAR_LEFT_SQUARE_BRACKET) { while (eos() !== true && (next = advance())) { if (next === CHAR_BACKWARD_SLASH) { backslashes = token.backslashes = true; advance(); continue; } if (next === CHAR_RIGHT_SQUARE_BRACKET) { isBracket = token.isBracket = true; isGlob = token.isGlob = true; finished = true; break; } } if (scanToEnd === true) { continue; } break; } if (opts.nonegate !== 
true && code === CHAR_EXCLAMATION_MARK && index === start) { negated = token.negated = true; start++; continue; } if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { isGlob = token.isGlob = true; if (scanToEnd === true) { while (eos() !== true && (code = advance())) { if (code === CHAR_LEFT_PARENTHESES) { backslashes = token.backslashes = true; code = advance(); continue; } if (code === CHAR_RIGHT_PARENTHESES) { finished = true; break; } } continue; } break; } if (isGlob === true) { finished = true; if (scanToEnd === true) { continue; } break; } } if (opts.noext === true) { isExtglob = false; isGlob = false; } let base = str; let prefix = ''; let glob = ''; if (start > 0) { prefix = str.slice(0, start); str = str.slice(start); lastIndex -= start; } if (base && isGlob === true && lastIndex > 0) { base = str.slice(0, lastIndex); glob = str.slice(lastIndex); } else if (isGlob === true) { base = ''; glob = str; } else { base = str; } if (base && base !== '' && base !== '/' && base !== str) { if (isPathSeparator(base.charCodeAt(base.length - 1))) { base = base.slice(0, -1); } } if (opts.unescape === true) { if (glob) glob = utils.removeBackslashes(glob); if (base && backslashes === true) { base = utils.removeBackslashes(base); } } const state = { prefix, input, start, base, glob, isBrace, isBracket, isGlob, isExtglob, isGlobstar, negated, negatedExtglob }; if (opts.tokens === true) { state.maxDepth = 0; if (!isPathSeparator(code)) { tokens.push(token); } state.tokens = tokens; } if (opts.parts === true || opts.tokens === true) { let prevIndex; for (let idx = 0; idx < slashes.length; idx++) { const n = prevIndex ? prevIndex + 1 : start; const i = slashes[idx]; const value = input.slice(n, i); if (opts.tokens) { if (idx === 0 && start !== 0) { tokens[idx].isPrefix = true; tokens[idx].value = prefix; } else { tokens[idx].value = value; } depth(tokens[idx]); state.maxDepth += tokens[idx].depth; } if (idx !== 0 || value !== '') { parts.push(value); } prevIndex = i; } if (prevIndex && prevIndex + 1 < input.length) { const value = input.slice(prevIndex + 1); parts.push(value); if (opts.tokens) { tokens[tokens.length - 1].value = value; depth(tokens[tokens.length - 1]); state.maxDepth += tokens[tokens.length - 1].depth; } } state.slashes = slashes; state.parts = parts; } return state; }; module.exports = scan; /***/ }), /***/ 7426: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; const path = __nccwpck_require__(1017); const win32 = process.platform === 'win32'; const { REGEX_BACKSLASH, REGEX_REMOVE_BACKSLASH, REGEX_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_GLOBAL } = __nccwpck_require__(6476); exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); exports.removeBackslashes = str => { return str.replace(REGEX_REMOVE_BACKSLASH, match => { return match === '\\' ? 
'' : match; }); }; exports.supportsLookbehinds = () => { const segs = process.version.slice(1).split('.').map(Number); if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { return true; } return false; }; exports.isWindows = options => { if (options && typeof options.windows === 'boolean') { return options.windows; } return win32 === true || path.sep === '\\'; }; exports.escapeLast = (input, char, lastIdx) => { const idx = input.lastIndexOf(char, lastIdx); if (idx === -1) return input; if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); return `${input.slice(0, idx)}\\${input.slice(idx)}`; }; exports.removePrefix = (input, state = {}) => { let output = input; if (output.startsWith('./')) { output = output.slice(2); state.prefix = './'; } return output; }; exports.wrapOutput = (input, state = {}, options = {}) => { const prepend = options.contains ? '' : '^'; const append = options.contains ? '' : '$'; let output = `${prepend}(?:${input})${append}`; if (state.negated === true) { output = `(?:^(?!${output}).*$)`; } return output; }; /***/ }), /***/ 3105: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! * mime-db * Copyright(c) 2014 Jonathan Ong * Copyright(c) 2015-2022 Douglas Christopher Wilson * MIT Licensed */ /** * Module exports. */ module.exports = __nccwpck_require__(3765) /***/ }), /***/ 3583: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /*! * mime-types * Copyright(c) 2014 Jonathan Ong * Copyright(c) 2015 Douglas Christopher Wilson * MIT Licensed */ /** * Module dependencies. * @private */ var db = __nccwpck_require__(3105) var extname = (__nccwpck_require__(1017).extname) /** * Module variables. * @private */ var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ var TEXT_TYPE_REGEXP = /^text\//i /** * Module exports. * @public */ exports.charset = charset exports.charsets = { lookup: charset } exports.contentType = contentType exports.extension = extension exports.extensions = Object.create(null) exports.lookup = lookup exports.types = Object.create(null) // Populate the extensions/types maps populateMaps(exports.extensions, exports.types) /** * Get the default charset for a MIME type. * * @param {string} type * @return {boolean|string} */ function charset (type) { if (!type || typeof type !== 'string') { return false } // TODO: use media-typer var match = EXTRACT_TYPE_REGEXP.exec(type) var mime = match && db[match[1].toLowerCase()] if (mime && mime.charset) { return mime.charset } // default text/* to utf-8 if (match && TEXT_TYPE_REGEXP.test(match[1])) { return 'UTF-8' } return false } /** * Create a full Content-Type header given a MIME type or extension. * * @param {string} str * @return {boolean|string} */ function contentType (str) { // TODO: should this even be in this module? if (!str || typeof str !== 'string') { return false } var mime = str.indexOf('/') === -1 ? exports.lookup(str) : str if (!mime) { return false } // TODO: use content-type or other module if (mime.indexOf('charset') === -1) { var charset = exports.charset(mime) if (charset) mime += '; charset=' + charset.toLowerCase() } return mime } /** * Get the default extension for a MIME type. 
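 *
 * Editor's illustration of the mime-types API in this module (exact results
 * depend on the bundled mime-db data):
 *
 * ```js
 * const mime = require('mime-types');
 * mime.extension('application/json'); //=> 'json'
 * mime.lookup('index.html');          //=> 'text/html'
 * mime.contentType('html');           //=> 'text/html; charset=utf-8'
 * mime.charset('text/plain');         //=> 'UTF-8'
 * ```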
* * @param {string} type * @return {boolean|string} */ function extension (type) { if (!type || typeof type !== 'string') { return false } // TODO: use media-typer var match = EXTRACT_TYPE_REGEXP.exec(type) // get extensions var exts = match && exports.extensions[match[1].toLowerCase()] if (!exts || !exts.length) { return false } return exts[0] } /** * Lookup the MIME type for a file path/extension. * * @param {string} path * @return {boolean|string} */ function lookup (path) { if (!path || typeof path !== 'string') { return false } // get the extension ("ext" or ".ext" or full path) var extension = extname('x.' + path) .toLowerCase() .substr(1) if (!extension) { return false } return exports.types[extension] || false } /** * Populate the extensions and types maps. * @private */ function populateMaps (extensions, types) { // source preference (least -> most) var preference = ['nginx', 'apache', undefined, 'iana'] Object.keys(db).forEach(function forEachMimeType (type) { var mime = db[type] var exts = mime.extensions if (!exts || !exts.length) { return } // mime -> extensions extensions[type] = exts // extension -> mime for (var i = 0; i < exts.length; i++) { var extension = exts[i] if (types[extension]) { var from = preference.indexOf(db[types[extension]].source) var to = preference.indexOf(mime.source) if (types[extension] !== 'application/octet-stream' && (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { // skip the remapping continue } } // set the extension -> mime types[extension] = type } }) } /***/ }), /***/ 900: /***/ ((module) => { /** * Helpers. */ var s = 1000; var m = s * 60; var h = m * 60; var d = h * 24; var w = d * 7; var y = d * 365.25; /** * Parse or format the given `val`. * * Options: * * - `long` verbose formatting [false] * * @param {String|Number} val * @param {Object} [options] * @throws {Error} throw an error if val is not a non-empty string or a number * @return {String|Number} * @api public */ module.exports = function(val, options) { options = options || {}; var type = typeof val; if (type === 'string' && val.length > 0) { return parse(val); } else if (type === 'number' && isFinite(val)) { return options.long ? fmtLong(val) : fmtShort(val); } throw new Error( 'val is not a non-empty string or a valid number. val=' + JSON.stringify(val) ); }; /** * Parse the given `str` and return milliseconds. * * @param {String} str * @return {Number} * @api private */ function parse(str) { str = String(str); if (str.length > 100) { return; } var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( str ); if (!match) { return; } var n = parseFloat(match[1]); var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'yrs': case 'yr': case 'y': return n * y; case 'weeks': case 'week': case 'w': return n * w; case 'days': case 'day': case 'd': return n * d; case 'hours': case 'hour': case 'hrs': case 'hr': case 'h': return n * h; case 'minutes': case 'minute': case 'mins': case 'min': case 'm': return n * m; case 'seconds': case 'second': case 'secs': case 'sec': case 's': return n * s; case 'milliseconds': case 'millisecond': case 'msecs': case 'msec': case 'ms': return n; default: return undefined; } } /** * Short format for `ms`. 
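 *
 * Editor's sketch of the exported `ms` function documented above; the values
 * follow from the parse/format helpers in this module:
 *
 * ```js
 * const ms = require('ms');
 * ms('2 days');              //=> 172800000
 * ms('1.5h');                //=> 5400000
 * ms(60000);                 //=> '1m'
 * ms(60000, { long: true }); //=> '1 minute'
 * ```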
* * @param {Number} ms * @return {String} * @api private */ function fmtShort(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return Math.round(ms / d) + 'd'; } if (msAbs >= h) { return Math.round(ms / h) + 'h'; } if (msAbs >= m) { return Math.round(ms / m) + 'm'; } if (msAbs >= s) { return Math.round(ms / s) + 's'; } return ms + 'ms'; } /** * Long format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtLong(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return plural(ms, msAbs, d, 'day'); } if (msAbs >= h) { return plural(ms, msAbs, h, 'hour'); } if (msAbs >= m) { return plural(ms, msAbs, m, 'minute'); } if (msAbs >= s) { return plural(ms, msAbs, s, 'second'); } return ms + ' ms'; } /** * Pluralization helper. */ function plural(ms, msAbs, n, name) { var isPlural = msAbs >= n * 1.5; return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); } /***/ }), /***/ 1223: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var wrappy = __nccwpck_require__(2940) module.exports = wrappy(once) module.exports.strict = wrappy(onceStrict) once.proto = once(function () { Object.defineProperty(Function.prototype, 'once', { value: function () { return once(this) }, configurable: true }) Object.defineProperty(Function.prototype, 'onceStrict', { value: function () { return onceStrict(this) }, configurable: true }) }) function once (fn) { var f = function () { if (f.called) return f.value f.called = true return f.value = fn.apply(this, arguments) } f.called = false return f } function onceStrict (fn) { var f = function () { if (f.called) throw new Error(f.onceError) f.called = true return f.value = fn.apply(this, arguments) } var name = fn.name || 'Function wrapped with `once`' f.onceError = name + " shouldn't be called more than once" f.called = false return f } /***/ }), /***/ 3329: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; var parseUrl = (__nccwpck_require__(7310).parse); var DEFAULT_PORTS = { ftp: 21, gopher: 70, http: 80, https: 443, ws: 80, wss: 443, }; var stringEndsWith = String.prototype.endsWith || function(s) { return s.length <= this.length && this.indexOf(s, this.length - s.length) !== -1; }; /** * @param {string|object} url - The URL, or the result from url.parse. * @return {string} The URL of the proxy that should handle the request to the * given URL. If no proxy is set, this will be an empty string. */ function getProxyForUrl(url) { var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; var proto = parsedUrl.protocol; var hostname = parsedUrl.host; var port = parsedUrl.port; if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { return ''; // Don't proxy URLs without a valid scheme or host. } proto = proto.split(':', 1)[0]; // Stripping ports in this way instead of using parsedUrl.hostname to make // sure that the brackets around IPv6 addresses are kept. hostname = hostname.replace(/:\d*$/, ''); port = parseInt(port) || DEFAULT_PORTS[proto] || 0; if (!shouldProxy(hostname, port)) { return ''; // Don't proxy URLs that match NO_PROXY. } var proxy = getEnv('npm_config_' + proto + '_proxy') || getEnv(proto + '_proxy') || getEnv('npm_config_proxy') || getEnv('all_proxy'); if (proxy && proxy.indexOf('://') === -1) { // Missing scheme in proxy, default to the requested URL's scheme. proxy = proto + '://' + proxy; } return proxy; } /** * Determines whether a given URL should be proxied. * * @param {string} hostname - The host name of the URL. 
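 *
 * Editor's sketch of the exported `getProxyForUrl` function; the environment
 * variable values shown are hypothetical:
 *
 * ```js
 * // HTTPS_PROXY=https://proxy.internal:3128   NO_PROXY=localhost,127.0.0.1
 * const { getProxyForUrl } = require('proxy-from-env');
 * getProxyForUrl('https://example.com/');    //=> 'https://proxy.internal:3128'
 * getProxyForUrl('https://localhost:8080/'); //=> ''  (excluded by NO_PROXY)
 * ```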
* @param {number} port - The effective port of the URL. * @returns {boolean} Whether the given URL should be proxied. * @private */ function shouldProxy(hostname, port) { var NO_PROXY = (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); if (!NO_PROXY) { return true; // Always proxy if NO_PROXY is not set. } if (NO_PROXY === '*') { return false; // Never proxy if wildcard is set. } return NO_PROXY.split(/[,\s]/).every(function(proxy) { if (!proxy) { return true; // Skip zero-length hosts. } var parsedProxy = proxy.match(/^(.+):(\d+)$/); var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; if (parsedProxyPort && parsedProxyPort !== port) { return true; // Skip if ports don't match. } if (!/^[.*]/.test(parsedProxyHostname)) { // No wildcards, so stop proxying if there is an exact match. return hostname !== parsedProxyHostname; } if (parsedProxyHostname.charAt(0) === '*') { // Remove leading wildcard. parsedProxyHostname = parsedProxyHostname.slice(1); } // Stop proxying if the hostname ends with the no_proxy host. return !stringEndsWith.call(hostname, parsedProxyHostname); }); } /** * Get the value for an environment variable. * * @param {string} key - The name of the environment variable. * @return {string} The value of the environment variable. * @private */ function getEnv(key) { return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; } exports.getProxyForUrl = getProxyForUrl; /***/ }), /***/ 9795: /***/ ((module) => { /*! queue-microtask. MIT License. Feross Aboukhadijeh */ let promise module.exports = typeof queueMicrotask === 'function' ? queueMicrotask.bind(typeof window !== 'undefined' ? window : global) // reuse resolved promise, and allocate it lazily : cb => (promise || (promise = Promise.resolve())) .then(cb) .catch(err => setTimeout(() => { throw err }, 0)) /***/ }), /***/ 9273: /***/ ((module) => { "use strict"; class QuickLRU { constructor(options = {}) { if (!(options.maxSize && options.maxSize > 0)) { throw new TypeError('`maxSize` must be a number greater than 0'); } this.maxSize = options.maxSize; this.onEviction = options.onEviction; this.cache = new Map(); this.oldCache = new Map(); this._size = 0; } _set(key, value) { this.cache.set(key, value); this._size++; if (this._size >= this.maxSize) { this._size = 0; if (typeof this.onEviction === 'function') { for (const [key, value] of this.oldCache.entries()) { this.onEviction(key, value); } } this.oldCache = this.cache; this.cache = new Map(); } } get(key) { if (this.cache.has(key)) { return this.cache.get(key); } if (this.oldCache.has(key)) { const value = this.oldCache.get(key); this.oldCache.delete(key); this._set(key, value); return value; } } set(key, value) { if (this.cache.has(key)) { this.cache.set(key, value); } else { this._set(key, value); } return this; } has(key) { return this.cache.has(key) || this.oldCache.has(key); } peek(key) { if (this.cache.has(key)) { return this.cache.get(key); } if (this.oldCache.has(key)) { return this.oldCache.get(key); } } delete(key) { const deleted = this.cache.delete(key); if (deleted) { this._size--; } return this.oldCache.delete(key) || deleted; } clear() { this.cache.clear(); this.oldCache.clear(); this._size = 0; } * keys() { for (const [key] of this) { yield key; } } * values() { for (const [, value] of this) { yield value; } } * [Symbol.iterator]() { for (const item of this.cache) { yield item; } for (const item of this.oldCache) { const [key] = item; if 
(!this.cache.has(key)) { yield item; } } } get size() { let oldCacheSize = 0; for (const key of this.oldCache.keys()) { if (!this.cache.has(key)) { oldCacheSize++; } } return Math.min(this._size + oldCacheSize, this.maxSize); } } module.exports = QuickLRU; /***/ }), /***/ 6624: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const tls = __nccwpck_require__(4404); module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => { let timeout = false; let socket; const callback = async () => { await socketPromise; socket.off('timeout', onTimeout); socket.off('error', reject); if (options.resolveSocket) { resolve({alpnProtocol: socket.alpnProtocol, socket, timeout}); if (timeout) { await Promise.resolve(); socket.emit('timeout'); } } else { socket.destroy(); resolve({alpnProtocol: socket.alpnProtocol, timeout}); } }; const onTimeout = async () => { timeout = true; callback(); }; const socketPromise = (async () => { try { socket = await connect(options, callback); socket.on('error', reject); socket.once('timeout', onTimeout); } catch (error) { reject(error); } })(); }); /***/ }), /***/ 2113: /***/ ((module) => { "use strict"; function reusify (Constructor) { var head = new Constructor() var tail = head function get () { var current = head if (current.next) { head = current.next } else { head = new Constructor() tail = head } current.next = null return current } function release (obj) { tail.next = obj tail = obj } return { get: get, release: release } } module.exports = reusify /***/ }), /***/ 5288: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! run-parallel. MIT License. Feross Aboukhadijeh */ module.exports = runParallel const queueMicrotask = __nccwpck_require__(9795) function runParallel (tasks, cb) { let results, pending, keys let isSync = true if (Array.isArray(tasks)) { results = [] pending = tasks.length } else { keys = Object.keys(tasks) results = {} pending = keys.length } function done (err) { function end () { if (cb) cb(err, results) cb = null } if (isSync) queueMicrotask(end) else end() } function each (i, err, result) { results[i] = result if (--pending === 0 || err) { done(err) } } if (!pending) { // empty done(null) } else if (keys) { // object keys.forEach(function (key) { tasks[key](function (err, result) { each(key, err, result) }) }) } else { // array tasks.forEach(function (task, i) { task(function (err, result) { each(i, err, result) }) }) } isSync = false } /***/ }), /***/ 2043: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { ;(function (sax) { // wrapper for non-node envs sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } sax.SAXParser = SAXParser sax.SAXStream = SAXStream sax.createStream = createStream // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), // since that's the earliest that a buffer overrun could occur. This way, checks are // as rare as required, but as often as necessary to ensure never crossing this bound. // Furthermore, buffers are only tested at most once per write(), so passing a very // large string into write() might have undesirable effects, but this is manageable by // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme // edge case, result in creating at most one complete copy of the string passed in. // Set to Infinity to have unlimited buffers. 
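// Editor's note, a worked example of the check scheduling described above: with the
// default limit of 64 * 1024 = 65536 characters, if the largest internal buffer holds
// 10240 characters when a check runs at parser position 100000, the next check is
// scheduled at position + (MAX_BUFFER_LENGTH - maxActual) = 100000 + (65536 - 10240) = 155296.
// Consumers can opt out of buffer checks entirely, as noted above, with:
//   require('sax').MAX_BUFFER_LENGTH = Infinity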
sax.MAX_BUFFER_LENGTH = 64 * 1024 var buffers = [ 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', 'procInstName', 'procInstBody', 'entity', 'attribName', 'attribValue', 'cdata', 'script' ] sax.EVENTS = [ 'text', 'processinginstruction', 'sgmldeclaration', 'doctype', 'comment', 'opentagstart', 'attribute', 'opentag', 'closetag', 'opencdata', 'cdata', 'closecdata', 'error', 'end', 'ready', 'script', 'opennamespace', 'closenamespace' ] function SAXParser (strict, opt) { if (!(this instanceof SAXParser)) { return new SAXParser(strict, opt) } var parser = this clearBuffers(parser) parser.q = parser.c = '' parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH parser.opt = opt || {} parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' parser.tags = [] parser.closed = parser.closedRoot = parser.sawRoot = false parser.tag = parser.error = null parser.strict = !!strict parser.noscript = !!(strict || parser.opt.noscript) parser.state = S.BEGIN parser.strictEntities = parser.opt.strictEntities parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) parser.attribList = [] // namespaces form a prototype chain. // it always points at the current tag, // which protos to its parent tag. if (parser.opt.xmlns) { parser.ns = Object.create(rootNS) } // mostly just for error reporting parser.trackPosition = parser.opt.position !== false if (parser.trackPosition) { parser.position = parser.line = parser.column = 0 } emit(parser, 'onready') } if (!Object.create) { Object.create = function (o) { function F () {} F.prototype = o var newf = new F() return newf } } if (!Object.keys) { Object.keys = function (o) { var a = [] for (var i in o) if (o.hasOwnProperty(i)) a.push(i) return a } } function checkBufferLength (parser) { var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) var maxActual = 0 for (var i = 0, l = buffers.length; i < l; i++) { var len = parser[buffers[i]].length if (len > maxAllowed) { // Text/cdata nodes can get big, and since they're buffered, // we can get here under normal conditions. // Avoid issues by emitting the text node now, // so at least it won't get any bigger. switch (buffers[i]) { case 'textNode': closeText(parser) break case 'cdata': emitNode(parser, 'oncdata', parser.cdata) parser.cdata = '' break case 'script': emitNode(parser, 'onscript', parser.script) parser.script = '' break default: error(parser, 'Max buffer length exceeded: ' + buffers[i]) } } maxActual = Math.max(maxActual, len) } // schedule the next check for the earliest possible buffer overrun. 
var m = sax.MAX_BUFFER_LENGTH - maxActual parser.bufferCheckPosition = m + parser.position } function clearBuffers (parser) { for (var i = 0, l = buffers.length; i < l; i++) { parser[buffers[i]] = '' } } function flushBuffers (parser) { closeText(parser) if (parser.cdata !== '') { emitNode(parser, 'oncdata', parser.cdata) parser.cdata = '' } if (parser.script !== '') { emitNode(parser, 'onscript', parser.script) parser.script = '' } } SAXParser.prototype = { end: function () { end(this) }, write: write, resume: function () { this.error = null; return this }, close: function () { return this.write(null) }, flush: function () { flushBuffers(this) } } var Stream try { Stream = (__nccwpck_require__(2781).Stream) } catch (ex) { Stream = function () {} } var streamWraps = sax.EVENTS.filter(function (ev) { return ev !== 'error' && ev !== 'end' }) function createStream (strict, opt) { return new SAXStream(strict, opt) } function SAXStream (strict, opt) { if (!(this instanceof SAXStream)) { return new SAXStream(strict, opt) } Stream.apply(this) this._parser = new SAXParser(strict, opt) this.writable = true this.readable = true var me = this this._parser.onend = function () { me.emit('end') } this._parser.onerror = function (er) { me.emit('error', er) // if didn't throw, then means error was handled. // go ahead and clear error, so we can write again. me._parser.error = null } this._decoder = null streamWraps.forEach(function (ev) { Object.defineProperty(me, 'on' + ev, { get: function () { return me._parser['on' + ev] }, set: function (h) { if (!h) { me.removeAllListeners(ev) me._parser['on' + ev] = h return h } me.on(ev, h) }, enumerable: true, configurable: false }) }) } SAXStream.prototype = Object.create(Stream.prototype, { constructor: { value: SAXStream } }) SAXStream.prototype.write = function (data) { if (typeof Buffer === 'function' && typeof Buffer.isBuffer === 'function' && Buffer.isBuffer(data)) { if (!this._decoder) { var SD = (__nccwpck_require__(1576).StringDecoder) this._decoder = new SD('utf8') } data = this._decoder.write(data) } this._parser.write(data.toString()) this.emit('data', data) return true } SAXStream.prototype.end = function (chunk) { if (chunk && chunk.length) { this.write(chunk) } this._parser.end() return true } SAXStream.prototype.on = function (ev, handler) { var me = this if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { me._parser['on' + ev] = function () { var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) args.splice(0, 0, ev) me.emit.apply(me, args) } } return Stream.prototype.on.call(me, ev, handler) } // this really needs to be replaced with character classes. // XML allows all manner of ridiculous numbers and digits. var CDATA = '[CDATA[' var DOCTYPE = 'DOCTYPE' var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } // http://www.w3.org/TR/REC-xml/#NT-NameStartChar // This implementation works on strings, a single character at a time // as such, it cannot ever support astral-plane characters (10000-EFFFF) // without a significant breaking change to either this parser, or the // JavaScript language. Implementation of an emoji-capable xml parser // is left as an exercise for the reader. 
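// Editor's note on the character classes below: nameStart accepts letters, ':' and '_'
// (e.g. nameStart.test('a') === true, nameStart.test('1') === false), while digits,
// '-', '.' and combining marks are only accepted by nameBody; see isMatch/notMatch below.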
var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ function isWhitespace (c) { return c === ' ' || c === '\n' || c === '\r' || c === '\t' } function isQuote (c) { return c === '"' || c === '\'' } function isAttribEnd (c) { return c === '>' || isWhitespace(c) } function isMatch (regex, c) { return regex.test(c) } function notMatch (regex, c) { return !isMatch(regex, c) } var S = 0 sax.STATE = { BEGIN: S++, // leading byte order mark or whitespace BEGIN_WHITESPACE: S++, // leading whitespace TEXT: S++, // general stuff TEXT_ENTITY: S++, // & and such. OPEN_WAKA: S++, // < SGML_DECL: S++, // <!BLARG SGML_DECL_QUOTED: S++, // <!BLARG foo "bar DOCTYPE: S++, // <!DOCTYPE DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ... DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo COMMENT_STARTING: S++, // <!- COMMENT: S++, // <!-- COMMENT_ENDING: S++, // <!-- blah - COMMENT_ENDED: S++, // <!-- blah -- CDATA: S++, // <![CDATA[ something CDATA_ENDING: S++, // ] CDATA_ENDING_2: S++, // ]] PROC_INST: S++, // <?hi PROC_INST_BODY: S++, // <?hi there PROC_INST_ENDING: S++, // <?hi "there" ? OPEN_TAG: S++, // <strong OPEN_TAG_SLASH: S++, // <strong / ATTRIB: S++, // <a ATTRIB_NAME: S++, // <a foo ATTRIB_NAME_SAW_WHITE: S++, // <a foo _ ATTRIB_VALUE: S++, // <a foo= ATTRIB_VALUE_QUOTED: S++, // <a foo="bar ATTRIB_VALUE_CLOSED: S++, // <a foo="bar" ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="&quot;" ATTRIB_VALUE_ENTITY_U: S++, // <foo bar=&quot CLOSE_TAG: S++, // </a CLOSE_TAG_SAW_WHITE: S++, // </a   > SCRIPT: S++, //