diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..37ed630
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,26 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: 'bug'
+assignees: 'dorny,dharmendrasha,j-catania'
+---
+
+## Describe the bug
+A clear and concise description of what the bug is.
+
+## To Reproduce
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+## Expected behavior
+A clear and concise description of what you expected to happen.
+
+## Screenshots
+If applicable, add screenshots to help explain your problem.
+
+## Additional context
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md
new file mode 100644
index 0000000..d8b8558
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature.md
@@ -0,0 +1,13 @@
+---
+name: Feature Request
+about: Suggest a feature
+title: ''
+labels: 'enhancement'
+assignees: 'dorny,dharmendrasha,j-catania'
+---
+
+## Describe the feature
+
+## Proposed solution
+
+## Alternatives considered
diff --git a/.github/workflows/check-dist.yml b/.github/workflows/check-dist.yml
index 22b61c2..5f4b9d2 100644
--- a/.github/workflows/check-dist.yml
+++ b/.github/workflows/check-dist.yml
@@ -23,10 +23,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- - name: Set Node.js 16.x
- uses: actions/setup-node@v3
+ - name: Set Node.js
+ uses: actions/setup-node@v4
with:
- node-version: 16.x
+ node-version-file: '.nvmrc'
- name: Install dependencies
run: npm ci
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 12b8778..545b4e3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -14,7 +14,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - run: node --version
+ - uses: actions/setup-node@v4
+ with:
+ node-version-file: '.nvmrc'
- run: npm ci
- run: npm run build
- run: npm run format-check
diff --git a/.nvmrc b/.nvmrc
index 79bdb1b..eb800ed 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-v18.7.0
+v18.19.0
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8adafbd..bd75192 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog
+## 1.8.0
+* Add `SwiftXunitParser` class based on `JavaJunitParser` for `swift-xunit` reporter https://github.com/dorny/test-reporter/pull/317
+* Use Node.js 18 LTS as the default runtime https://github.com/dorny/test-reporter/pull/332
+* Escape `<>` characters in suite name https://github.com/dorny/test-reporter/pull/236
+* Update actions runtime to Node 20 https://github.com/dorny/test-reporter/pull/315
+* Update check title and remove icon https://github.com/dorny/test-reporter/pull/144
+
## 1.7.0
* Fix #199: Use ✅ instead of ✔️ for better cross platform look by @petrdvorak in https://github.com/dorny/test-reporter/pull/200
* Verify content of dist/ folder matches build output by @dorny in https://github.com/dorny/test-reporter/pull/207
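For context on the Node 20 runtime entry above: for a bundled JavaScript action this is typically a one-line bump of the `runs.using` field in the action manifest. A minimal sketch, assuming the conventional `action.yml` layout (the `main` entry point is an assumption, not taken from this diff):

```yaml
# action.yml (illustrative sketch) - only the `using` value reflects the
# changelog entry above; the `main` path is assumed, not part of this diff.
runs:
  using: 'node20'
  main: 'dist/index.js'
```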
diff --git a/README.md b/README.md
index 0ae7b76..f4be6e0 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,7 @@ This [Github Action](https://github.com/features/actions) displays test results
- Flutter / [test](https://pub.dev/packages/test)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
+- Swift / xUnit
For more information see [Supported formats](#supported-formats) section.
@@ -317,6 +318,12 @@ Mocha, unfortunately, doesn't have the option to store `json` output directly to
There is a work in progress to fix it: [mocha#4607](https://github.com/mochajs/mocha/pull/4607)
+<details>
+  <summary>swift-xunit (Experimental)</summary>
+
+Support for Swift test results in xUnit format is experimental. It should work, but it has not been extensively tested.
+
+</details>
## GitHub limitations
Unfortunately, there are some known issues and limitations caused by GitHub API:
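For readers of the README change above, a minimal usage sketch of the new `swift-xunit` reporter follows. The `name`, `path`, and `reporter` inputs are the action's standard inputs; the step name and path glob are illustrative assumptions, not taken from this diff:

```yaml
# Illustrative workflow step only; the reporter value comes from this release (1.8.0).
- name: Swift test report
  uses: dorny/test-reporter@v1
  with:
    name: Swift Tests                # display name of the check run
    path: 'build/reports/tests.xml'  # xUnit XML produced by the Swift test run (assumed path)
    reporter: swift-xunit            # new experimental reporter
```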
diff --git a/__tests__/__outputs__/jest-react-component-test-results.md b/__tests__/__outputs__/jest-react-component-test-results.md
new file mode 100644
index 0000000..7891a19
--- /dev/null
+++ b/__tests__/__outputs__/jest-react-component-test-results.md
@@ -0,0 +1,10 @@
+
+## ✅ fixtures/external/jest/jest-react-component-test-results.xml
+**1** tests were completed in **1000ms** with **1** passed, **0** failed and **0** skipped.
+|Test suite|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[\](#r0s0)|1✅|||798ms|
+### ✅ \
+```
+✅ should render properly
+```
\ No newline at end of file
diff --git a/__tests__/__outputs__/swift-xunit.md b/__tests__/__outputs__/swift-xunit.md
new file mode 100644
index 0000000..ecafaf9
--- /dev/null
+++ b/__tests__/__outputs__/swift-xunit.md
@@ -0,0 +1,13 @@
+
+## ❌ fixtures/swift-xunit.xml
+**3** tests were completed in **220ms** with **2** passed, **1** failed and **0** skipped.
+|Test suite|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[TestResults](#r0s0)|2✅|1❌||220ms|
+### ❌ TestResults
+```
+AcmeLibTests.AcmeLibTests
+ ✅ test_always_pass
+ ✅ test_always_skip
+ ❌ test_always_fail
+```
\ No newline at end of file
diff --git a/__tests__/__snapshots__/dart-json.test.ts.snap b/__tests__/__snapshots__/dart-json.test.ts.snap
index 847db6a..a499822 100644
--- a/__tests__/__snapshots__/dart-json.test.ts.snap
+++ b/__tests__/__snapshots__/dart-json.test.ts.snap
@@ -3,12 +3,12 @@
exports[`dart-json tests matches report snapshot 1`] = `
TestRunResult {
"path": "fixtures/dart-json.json",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Test 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Passing test",
@@ -19,11 +19,11 @@ TestRunResult {
},
TestGroupResult {
"name": "Test 1 Test 1.1",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "package:test_api expect
-test\\\\main_test.dart 13:9 main...
+test\\main_test.dart 13:9 main...
",
"line": 13,
"message": "Expected: <2>
@@ -36,9 +36,9 @@ test\\\\main_test.dart 13:9 main...
"time": 20,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "package:darttest/main.dart 2:3 throwError
-test\\\\main_test.dart 17:9 main...
+test\\main_test.dart 17:9 main...
",
"line": 17,
"message": "Exception: Some error",
@@ -52,10 +52,10 @@ test\\\\main_test.dart 17:9 main...
},
TestGroupResult {
"name": "Test 2",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
- "details": "test\\\\main_test.dart 24:7 main..
+ "error": {
+ "details": "test\\main_test.dart 24:7 main..
",
"line": 24,
"message": "Exception: Some error",
@@ -72,12 +72,12 @@ test\\\\main_test.dart 17:9 main...
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "dart:isolate _RawReceivePortImpl._handleMessage
",
"line": 5,
@@ -108,12 +108,12 @@ test\\\\main_test.dart 17:9 main...
exports[`dart-json tests report from rrousselGit/provider test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/flutter/provider-test-results.json",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "valueListenableProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "rebuilds when value change",
@@ -139,7 +139,7 @@ TestRunResult {
"time": 22,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "══╡ EXCEPTION CAUGHT BY FLUTTER TEST FRAMEWORK ╞════════════════════════════════════════════════════
The following TestFailure object was thrown running a test:
Expected: <2>
@@ -178,10 +178,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ListenableProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with MultiProvider",
@@ -252,7 +252,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ListenableProvider value constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "pass down key",
@@ -269,7 +269,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ListenableProvider stateful constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "called with context",
@@ -295,10 +295,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "consumer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -321,7 +321,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "consumer2",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -344,7 +344,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "consumer3",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -367,7 +367,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "consumer4",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -390,7 +390,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "consumer5",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -413,7 +413,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "consumer6",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "obtains value from Provider",
@@ -439,10 +439,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Use builder property, not child",
@@ -453,7 +453,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ChangeNotifierProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "value",
@@ -515,10 +515,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ChangeNotifierProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default",
@@ -535,7 +535,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ListenableProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default",
@@ -552,7 +552,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "Provider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default",
@@ -569,7 +569,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ProxyProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "0",
@@ -616,7 +616,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "MultiProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "with 1 ChangeNotifierProvider default",
@@ -690,10 +690,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "MultiProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throw if providers is null",
@@ -719,10 +719,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with MultiProvider",
@@ -763,7 +763,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "StreamProvider()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "create and dispose stream with builder",
@@ -783,10 +783,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "watch in layoutbuilder",
@@ -827,7 +827,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "BuildContext",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "internal selected value is updated",
@@ -985,10 +985,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ReassembleHandler",
@@ -1014,10 +1014,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with MultiProvider",
@@ -1076,7 +1076,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "FutureProvider()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "crashes if builder is null",
@@ -1090,10 +1090,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with MultiProvider",
@@ -1104,7 +1104,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "Provider.of",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws if T is dynamic",
@@ -1139,7 +1139,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "Provider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws if the provided value is a Listenable/Stream",
@@ -1177,10 +1177,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "regression test #377",
@@ -1371,7 +1371,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "diagnostics",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "InheritedProvider.value",
@@ -1406,7 +1406,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "InheritedProvider.value()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "markNeedsNotifyDependents during startListening is noop",
@@ -1459,7 +1459,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "InheritedProvider()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "hasValue",
@@ -1614,7 +1614,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "DeferredInheritedProvider.value()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "hasValue",
@@ -1667,7 +1667,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "DeferredInheritedProvider()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "create can't call inherited widgets",
@@ -1699,10 +1699,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ListenableProxyProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws if update is missing",
@@ -1743,7 +1743,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ListenableProxyProvider variants",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ListenableProxyProvider",
@@ -1787,10 +1787,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "asserts that builder/selector are not null",
@@ -1900,10 +1900,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "asserts",
@@ -1935,10 +1935,10 @@ Unexpected number of calls
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ProxyProvider",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws if the provided value is a Listenable/Stream",
@@ -2009,7 +2009,7 @@ Unexpected number of calls
},
TestGroupResult {
"name": "ProxyProvider variants",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ProxyProvider2",
diff --git a/__tests__/__snapshots__/dotnet-trx.test.ts.snap b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
index 0301a56..1ca07eb 100644
--- a/__tests__/__snapshots__/dotnet-trx.test.ts.snap
+++ b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
@@ -3,12 +3,12 @@
exports[`dotnet-trx tests matches report snapshot 1`] = `
TestRunResult {
"path": "fixtures/dotnet-trx.trx",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Custom Name",
@@ -16,10 +16,10 @@ TestRunResult {
"time": 0.1371,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "System.DivideByZeroException : Attempted to divide by zero.
- at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.Unit\\\\Calculator.cs:line 9
- at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.XUnitTests\\\\CalculatorTests.cs:line 33",
+ at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
+ at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
"line": 9,
"message": "System.DivideByZeroException : Attempted to divide by zero.",
"path": "DotnetTests.Unit/Calculator.cs",
@@ -29,9 +29,9 @@ TestRunResult {
"time": 0.8377,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "System.Exception : Test
- at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.XUnitTests\\\\CalculatorTests.cs:line 39",
+ at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
"line": 39,
"message": "System.Exception : Test",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
@@ -41,11 +41,11 @@ TestRunResult {
"time": 2.5175,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Assert.Equal() Failure
Expected: 3
Actual: 2
- at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.XUnitTests\\\\CalculatorTests.cs:line 27",
+ at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
"line": 27,
"message": "Assert.Equal() Failure
Expected: 3
@@ -63,11 +63,11 @@ Actual: 2",
"time": 0.0078,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Assert.True() Failure
Expected: True
Actual: False
- at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.XUnitTests\\\\CalculatorTests.cs:line 59",
+ at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
"line": 59,
"message": "Assert.True() Failure
Expected: True
@@ -91,11 +91,11 @@ Actual: False",
"time": 0.0097,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Assert.True() Failure
Expected: True
Actual: False
- at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\dotnet\\\\DotnetTests.XUnitTests\\\\CalculatorTests.cs:line 67",
+ at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
"line": 67,
"message": "Assert.True() Failure
Expected: True
@@ -132,12 +132,12 @@ Actual: False",
exports[`dotnet-trx tests report from FluentValidation test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/FluentValidation.Tests.trx",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_replace_default_errorcode_resolver",
@@ -355,10 +355,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Benchmark",
@@ -396,10 +396,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Finds_validators_for_types",
@@ -419,10 +419,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Cascade_mode_can_be_set_after_validator_instantiated",
@@ -658,10 +658,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_validate_using_validator_for_base_type",
@@ -747,10 +747,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Options_should_only_apply_to_current_validator",
@@ -776,10 +776,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_define_nested_rules_for_collection",
@@ -799,10 +799,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_condition_should_work_with_child_collection",
@@ -906,10 +906,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_condition_should_work_with_complex_property",
@@ -1019,10 +1019,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_condition_executed_synchronosuly_with_asynchronous_collection_rule",
@@ -1138,10 +1138,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "IsValidTests",
@@ -1161,10 +1161,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Does_not_invoke_action_if_validation_success",
@@ -1190,10 +1190,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Replaces_propertyvalue_placeholder",
@@ -1237,10 +1237,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "New_Custom_Returns_single_failure",
@@ -1308,10 +1308,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Empty_should_create_EmptyValidator",
@@ -1499,139 +1499,139 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\" \\\\r \\\\t \\\\n\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: " \\r \\t \\n")",
"result": "success",
"time": 0.0247,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "")",
"result": "success",
"time": 0.0314,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"@someDomain.com\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "@someDomain.com")",
"result": "success",
"time": 0.0248,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"@someDomain@abc.com\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "@someDomain@abc.com")",
"result": "success",
"time": 0.0234,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"0\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "0")",
"result": "success",
"time": 0.2222,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"someName\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "someName")",
"result": "success",
"time": 0.023,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"someName@\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "someName@")",
"result": "success",
"time": 0.022699999999999998,
},
TestCaseResult {
"error": undefined,
- "name": "Fails_email_validation_aspnetcore_compatible(email: \\"someName@a@b.com\\")",
+ "name": "Fails_email_validation_aspnetcore_compatible(email: "someName@a@b.com")",
"result": "success",
"time": 0.022699999999999998,
},
TestCaseResult {
"error": undefined,
- "name": "Invalid_email_addressex_regex(email: \\"\\")",
+ "name": "Invalid_email_addressex_regex(email: "")",
"result": "success",
"time": 0.3859,
},
TestCaseResult {
"error": undefined,
- "name": "Invalid_email_addressex_regex(email: \\"first.last@test..co.uk\\")",
+ "name": "Invalid_email_addressex_regex(email: "first.last@test..co.uk")",
"result": "success",
"time": 0.0334,
},
TestCaseResult {
"error": undefined,
- "name": "Invalid_email_addressex_regex(email: \\"testperso\\")",
+ "name": "Invalid_email_addressex_regex(email: "testperso")",
"result": "success",
"time": 0.0395,
},
TestCaseResult {
"error": undefined,
- "name": "Invalid_email_addressex_regex(email: \\"thisisaverylongstringcodeplex.com\\")",
+ "name": "Invalid_email_addressex_regex(email: "thisisaverylongstringcodeplex.com")",
"result": "success",
"time": 0.0316,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\" @someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: " @someDomain.com")",
"result": "success",
"time": 0.0173,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"!#$%&'*+-/=?^_\`|~@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "!#$%&'*+-/=?^_\`|~@someDomain.com")",
"result": "success",
"time": 0.0174,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"\\\\\\"firstName.lastName\\\\\\"@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "\\"firstName.lastName\\"@someDomain.com")",
"result": "success",
"time": 0.0167,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"1234@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "1234@someDomain.com")",
"result": "success",
"time": 0.0307,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"firstName.lastName@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "firstName.lastName@someDomain.com")",
"result": "success",
"time": 0.0191,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"someName@1234.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "someName@1234.com")",
"result": "success",
"time": 0.0158,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"someName@some_domain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "someName@some_domain.com")",
"result": "success",
"time": 0.0162,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"someName@some~domain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "someName@some~domain.com")",
"result": "success",
"time": 0.0166,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"someName@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "someName@someDomain.com")",
"result": "success",
"time": 0.0167,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_aspnetcore_compatible(email: \\"someName@someDomain.com\\")",
+ "name": "Valid_email_addresses_aspnetcore_compatible(email: "someName@someDomain.com")",
"result": "success",
"time": 0.0157,
},
@@ -1643,73 +1643,73 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"__somename@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "__somename@example.com")",
"result": "success",
"time": 0.0219,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"!def!xyz%abc@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "!def!xyz%abc@example.com")",
"result": "success",
"time": 0.022,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"\\\\\\"Abc@def\\\\\\"@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "\\"Abc@def\\"@example.com")",
"result": "success",
"time": 0.0245,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"\\\\\\"Abc\\\\\\\\@def\\\\\\"@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "\\"Abc\\\\@def\\"@example.com")",
"result": "success",
"time": 0.026,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"\\\\\\"Fred Bloggs\\\\\\"@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "\\"Fred Bloggs\\"@example.com")",
"result": "success",
"time": 0.0258,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"\\\\\\"Joe\\\\\\\\Blow\\\\\\"@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "\\"Joe\\\\Blow\\"@example.com")",
"result": "success",
"time": 0.0244,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"$A12345@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "$A12345@example.com")",
"result": "success",
"time": 0.022600000000000002,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"customer/department=shipping@example.com\\")",
+ "name": "Valid_email_addresses_regex(email: "customer/department=shipping@example.com")",
"result": "success",
"time": 0.0234,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"first.last@test.co.uk\\")",
+ "name": "Valid_email_addresses_regex(email: "first.last@test.co.uk")",
"result": "success",
"time": 0.022600000000000002,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"testperson@gmail.com\\")",
+ "name": "Valid_email_addresses_regex(email: "testperson@gmail.com")",
"result": "success",
"time": 16.3384,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"TestPerson@gmail.com\\")",
+ "name": "Valid_email_addresses_regex(email: "TestPerson@gmail.com")",
"result": "success",
"time": 0.0805,
},
TestCaseResult {
"error": undefined,
- "name": "Valid_email_addresses_regex(email: \\"testperson+label@gmail.com\\")",
+ "name": "Valid_email_addresses_regex(email: "testperson+label@gmail.com")",
"result": "success",
"time": 0.0275,
},
@@ -1726,10 +1726,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Passes_for_ienumerable_that_doesnt_implement_ICollection",
@@ -1791,10 +1791,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Flags_enum_invalid_when_using_outofrange_negative_value",
@@ -1874,10 +1874,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -1945,10 +1945,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Min_and_max_properties_should_be_set",
@@ -1992,10 +1992,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "To_and_from_properties_should_be_set",
@@ -2117,10 +2117,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Should_extract_member_from_member_expression",
@@ -2152,10 +2152,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_condition_should_work_with_child_collection",
@@ -2367,10 +2367,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -2462,10 +2462,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -2551,10 +2551,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "To_and_from_properties_should_be_set",
@@ -2670,10 +2670,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_use_custom_subclass_with_nongeneric_overload",
@@ -2747,10 +2747,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Uses_inline_validator_to_build_rules",
@@ -2764,10 +2764,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "All_languages_should_be_loaded",
@@ -2836,19 +2836,19 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_bosnian_latin_culture(cultureName: \\"bs-Latn-BA\\")",
+ "name": "Gets_translation_for_bosnian_latin_culture(cultureName: "bs-Latn-BA")",
"result": "success",
"time": 0.011,
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_bosnian_latin_culture(cultureName: \\"bs-Latn\\")",
+ "name": "Gets_translation_for_bosnian_latin_culture(cultureName: "bs-Latn")",
"result": "success",
"time": 0.0627,
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_bosnian_latin_culture(cultureName: \\"bs\\")",
+ "name": "Gets_translation_for_bosnian_latin_culture(cultureName: "bs")",
"result": "success",
"time": 0.661,
},
@@ -2866,19 +2866,19 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_serbian_culture(cultureName: \\"sr-Latn-RS\\")",
+ "name": "Gets_translation_for_serbian_culture(cultureName: "sr-Latn-RS")",
"result": "success",
"time": 0.01,
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_serbian_culture(cultureName: \\"sr-Latn\\")",
+ "name": "Gets_translation_for_serbian_culture(cultureName: "sr-Latn")",
"result": "success",
"time": 0.0484,
},
TestCaseResult {
"error": undefined,
- "name": "Gets_translation_for_serbian_culture(cultureName: \\"sr\\")",
+ "name": "Gets_translation_for_serbian_culture(cultureName: "sr")",
"result": "success",
"time": 0.4456,
},
@@ -2901,10 +2901,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Min_and_max_properties_should_be_set",
@@ -3008,10 +3008,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -3097,10 +3097,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -3204,10 +3204,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Correctly_assigns_default_localized_error_message",
@@ -3251,10 +3251,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Uses_localized_name",
@@ -3274,10 +3274,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ComplexPropertyGet",
@@ -3339,10 +3339,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Adds_argument_and_custom_arguments",
@@ -3410,10 +3410,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_use_child_validator_at_model_level",
@@ -3433,10 +3433,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Resolves_nested_properties",
@@ -3462,10 +3462,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Fails_for_array",
@@ -3533,10 +3533,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Comparison_property_uses_custom_resolver",
@@ -3610,10 +3610,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Fails_when_nullable_value_type_is_null",
@@ -3651,10 +3651,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Not_null_validator_should_not_crash_with_non_nullable_value_type",
@@ -3692,10 +3692,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "OnFailure_called_for_each_failed_rule",
@@ -3763,10 +3763,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Should_fail_when_predicate_returns_false",
@@ -3804,10 +3804,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "AddIndexer_throws_when_nothing_added",
@@ -3857,10 +3857,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_access_expression_in_message",
@@ -3958,10 +3958,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Adding_a_validator_should_return_builder",
@@ -4143,10 +4143,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_inside_dependent_rules",
@@ -4238,10 +4238,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Applies_multiple_rulesets_to_rule",
@@ -4375,10 +4375,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Scale_precision_should_be_valid",
@@ -4422,10 +4422,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Async_condition_can_be_used_inside_ruleset",
@@ -4685,10 +4685,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Should_validate_property_value_without_instance",
@@ -4702,10 +4702,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "IsValidTests_CaseInsensitive_CaseCorrect",
@@ -4773,10 +4773,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Add_AddsItem",
@@ -4802,10 +4802,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Transforms_collection_element",
@@ -4837,10 +4837,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_Provide_conditional_severity",
@@ -4890,10 +4890,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_Provide_state_for_item_in_collection",
@@ -4925,10 +4925,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Does_not_throw_when_valid",
@@ -5020,10 +5020,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_serialize_failure",
@@ -5079,10 +5079,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Does_not_throw_when_rule_declared_without_property",
@@ -5120,10 +5120,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Can_use_property_with_include",
@@ -5191,10 +5191,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Allows_only_one_failure_to_match",
@@ -5353,7 +5353,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "ShouldHaveValidationError_should_not_throw_when_there_are_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: \\"\\")",
+ "name": "ShouldHaveValidationError_should_not_throw_when_there_are_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: "")",
"result": "success",
"time": 0.6089,
},
@@ -5389,13 +5389,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 17, cardNumber: \\"\\")",
+ "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 17, cardNumber: "")",
"result": "success",
"time": 1.7937,
},
TestCaseResult {
"error": undefined,
- "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 17, cardNumber: \\"cardNumber\\")",
+ "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 17, cardNumber: "cardNumber")",
"result": "success",
"time": 0.6412,
},
@@ -5407,7 +5407,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 42, cardNumber: \\"cardNumber\\")",
+ "name": "ShouldHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_Is_Used(age: 42, cardNumber: "cardNumber")",
"result": "success",
"time": 0.8127,
},
@@ -5521,13 +5521,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 17, cardNumber: \\"\\")",
+ "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 17, cardNumber: "")",
"result": "success",
"time": 0.5719,
},
TestCaseResult {
"error": undefined,
- "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 17, cardNumber: \\"cardNumber\\")",
+ "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 17, cardNumber: "cardNumber")",
"result": "success",
"time": 0.5838,
},
@@ -5539,13 +5539,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: \\"cardNumber\\")",
+ "name": "ShouldNotHaveValidationError_should_throw_when_there_are_not_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: "cardNumber")",
"result": "success",
"time": 0.897,
},
TestCaseResult {
"error": undefined,
- "name": "ShouldNotHaveValidationError_should_throw_when_there_are_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: \\"\\")",
+ "name": "ShouldNotHaveValidationError_should_throw_when_there_are_validation_errors__WhenAsyn_is_used(age: 42, cardNumber: "")",
"result": "success",
"time": 0.6807,
},
@@ -5599,25 +5599,25 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "Unexpected_message_check(withoutErrMsg: \\"bar\\", errMessages: [\\"bar\\"])",
+ "name": "Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["bar"])",
"result": "success",
"time": 0.1548,
},
TestCaseResult {
"error": undefined,
- "name": "Unexpected_message_check(withoutErrMsg: \\"bar\\", errMessages: [\\"foo\\", \\"bar\\"])",
+ "name": "Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["foo", "bar"])",
"result": "success",
"time": 0.1797,
},
TestCaseResult {
"error": undefined,
- "name": "Unexpected_message_check(withoutErrMsg: \\"bar\\", errMessages: [\\"foo\\"])",
+ "name": "Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["foo"])",
"result": "success",
"time": 0.3297,
},
TestCaseResult {
"error": undefined,
- "name": "Unexpected_message_check(withoutErrMsg: \\"bar\\", errMessages: [])",
+ "name": "Unexpected_message_check(withoutErrMsg: "bar", errMessages: [])",
"result": "success",
"time": 0.7345999999999999,
},
@@ -5647,12 +5647,12 @@ TestRunResult {
exports[`dotnet-trx tests report from SilentNotes test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/SilentNotes.trx",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "AreEqualWorksWithDifferentPassword",
@@ -5696,10 +5696,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "FileLifecycleWorks",
@@ -5737,10 +5737,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "FileLifecycleWorks",
@@ -5790,10 +5790,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ChoosesCorrectUrlForGmxComEmail",
@@ -5813,10 +5813,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "FileLifecycleWorks",
@@ -5848,10 +5848,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "FileLifecycleWorks",
@@ -5883,10 +5883,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "FileLifecycleWorks",
@@ -5924,10 +5924,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "AreEqualWorksWithNullDate",
@@ -5989,10 +5989,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ParsesAllErrorCodesCorrectly",
@@ -6018,10 +6018,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "BuildAuthorizationRequestUrlEscapesParameters",
@@ -6083,10 +6083,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "BuildOAuth2AuthorizationRequestUrlWorks",
@@ -6124,10 +6124,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "AreEqualsWorksCorrectly",
@@ -6177,10 +6177,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "DecryptAfterDesrializationCanReadAllPropertiesBack",
diff --git a/__tests__/__snapshots__/java-junit.test.ts.snap b/__tests__/__snapshots__/java-junit.test.ts.snap
index 73c5f80..041f380 100644
--- a/__tests__/__snapshots__/java-junit.test.ts.snap
+++ b/__tests__/__snapshots__/java-junit.test.ts.snap
@@ -3,12 +3,12 @@
exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testVersionStrings",
@@ -16,7 +16,7 @@ TestRunResult {
"time": 99,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "
java.lang.AssertionError: expected [1.2.1] but found [1.2.0]
at org.testng.Assert.fail(Assert.java:99)
@@ -62,12 +62,12 @@ at java.lang.Thread.run(Thread.java:748)
exports[`java-junit tests report from apache/pulsar test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/pulsar-test-report.xml",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testVersionStrings",
@@ -75,7 +75,7 @@ TestRunResult {
"time": 99,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "
java.lang.AssertionError: expected [1.2.1] but found [1.2.0]
at org.testng.Assert.fail(Assert.java:99)
@@ -114,10 +114,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 116,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMainRunBookieNoConfig",
@@ -179,10 +179,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 591.0000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testGetWorkerService",
@@ -202,10 +202,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 96,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testOwnedNamespaces",
@@ -237,10 +237,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 9184.999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testOffloadPoliciesAppliedApi",
@@ -290,10 +290,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 18584.000000000004,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "httpEndpoint",
@@ -313,10 +313,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1389,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthentication",
@@ -336,10 +336,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 185,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "simple",
@@ -353,10 +353,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2384,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "crossColoLookup",
@@ -388,10 +388,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1930,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCreateNamespaceWithDefaultBundles",
@@ -411,10 +411,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 33168,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testGetAllPartitions",
@@ -434,10 +434,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 32357,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSplitMapWithRefreshedStatMap",
@@ -505,10 +505,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 75271.99999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testUnloadNotLoadedNamespace",
@@ -528,10 +528,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 13704,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testOwnershipForCurrentServer",
@@ -545,10 +545,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15709,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testGetOwnedServiceUnits",
@@ -604,10 +604,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15749.999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testLoadProtocolHandler",
@@ -633,10 +633,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7398.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testWrapper",
@@ -650,10 +650,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testStart",
@@ -697,10 +697,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 946.0000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMessagePublishBufferThrottleEnable",
@@ -726,10 +726,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 13830,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testResumptionAfterBacklogRelaxed",
@@ -869,10 +869,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 40154,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testReleaseOwnershipWithZookeeperDisconnectedBeforeOwnershipNodeDeleted",
@@ -928,10 +928,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 114278,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testJvmDirectMemoryUsedMetric",
@@ -951,10 +951,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 49,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAckStatsOnPartitionedTopicForExclusiveSubscription",
@@ -980,10 +980,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 21163.999999999996,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testManagedCursorMetrics",
@@ -997,10 +997,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 281,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testManagedLedgerMetrics",
@@ -1014,10 +1014,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 285,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPerTopicStats",
@@ -1115,10 +1115,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 83145.00000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConsumersAfterMarkDelete",
@@ -1138,10 +1138,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1806,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSimpleAggregation",
@@ -1155,10 +1155,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 40,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSendAndReceiveNamespaceEvents",
@@ -1172,10 +1172,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1043,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "noSortedTest",
@@ -1195,10 +1195,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 29646,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCloseReleaseAllEntries",
@@ -1224,10 +1224,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 28,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAbortOnTopic",
@@ -1259,10 +1259,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 93,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testOpenReaderOnNonExistentTxn",
@@ -1312,10 +1312,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 80.99999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCloseShouldReleaseBuffer",
@@ -1329,10 +1329,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 14,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTransactionBufferLowWaterMark",
@@ -1352,10 +1352,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 37577,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "commitTxnTest",
@@ -1381,10 +1381,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 49067.00000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testClientStart",
@@ -1410,10 +1410,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 95,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTransactionMetaStoreAssignAndFailover",
@@ -1427,10 +1427,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1151,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "txnAckTestNoBatchAndSharedSubMemoryDeleteTest",
@@ -1456,10 +1456,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 57293.00000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRestException",
@@ -1485,10 +1485,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 37.00000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTlsAuthDisallowInsecure",
@@ -1550,10 +1550,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 27441.999999999996,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthorizedUserAsOriginalPrincipal",
@@ -1585,10 +1585,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 8297,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSerializationEmpty",
@@ -1620,10 +1620,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testBatchMessageIndexAckForExclusiveSubscription",
@@ -1655,10 +1655,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 13732,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testBatchMessageIndexAckForSharedSubscription",
@@ -1696,10 +1696,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 43856,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testDisconnectClientWithoutClosingConnection",
@@ -1797,10 +1797,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 148203,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCompactedOutMessages",
@@ -1814,10 +1814,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1385,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAckResponse",
@@ -1831,10 +1831,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 549,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testReadCompactNonPersistentExclusive",
@@ -1866,10 +1866,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 12164,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConsumerDedup",
@@ -1919,10 +1919,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 3522.0000000000005,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConsumerUnsubscribeReference",
@@ -1936,10 +1936,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 129,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTlsClientAuthOverHTTPProtocol",
@@ -1965,10 +1965,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 23461,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTlsClientAuthOverHTTPProtocol",
@@ -1994,10 +1994,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7675.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testValidate",
@@ -2011,10 +2011,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 183,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testChecksumCompatibilityInMixedVersionBrokerCluster",
@@ -2040,10 +2040,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 46690,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPublishWithFailure",
@@ -2105,10 +2105,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 73214,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testWithoutBatches",
@@ -2128,10 +2128,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 5484.999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testReadMessageWithBatchingWithMessageInclusive",
@@ -2187,10 +2187,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 35143,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testNegativeAcks",
@@ -2390,10 +2390,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 10661.999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testStartEmptyPatternConsumer",
@@ -2467,10 +2467,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 62620,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSharedAckedNormalTopic",
@@ -2508,10 +2508,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 33504,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMultiHostUrlRetrySuccess",
@@ -2537,10 +2537,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15314,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSerializationAndDeserialization",
@@ -2554,10 +2554,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 10,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "createTopicDeleteTopicCreateTopic",
@@ -2571,10 +2571,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1937,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCheckSequenceId",
@@ -2612,10 +2612,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 17976,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCreateConsumerOnNotExistsTopic",
@@ -2635,10 +2635,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 4053,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSingleTopicConsumerNoBatchFullName",
@@ -2676,10 +2676,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 13659,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTopicAutoUpdatePartitions",
@@ -2789,10 +2789,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 133445.00000000003,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCheckUnAcknowledgedMessageTimer",
@@ -2842,10 +2842,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 43828,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "zeroQueueSizeSharedSubscription",
@@ -2937,10 +2937,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15712,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testGetProperties",
@@ -2954,10 +2954,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 316,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testChecksumSendCommand",
@@ -2971,10 +2971,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCompressDecompress",
@@ -3018,10 +3018,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 223.00000000000003,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCompressDecompress",
@@ -3299,10 +3299,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 737.0000000000005,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCrc32cHardware",
@@ -3346,10 +3346,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 5403.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testLoadReportSerialization",
@@ -3381,10 +3381,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1530,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testInvalidMetadata",
@@ -3404,10 +3404,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 161,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testIncludes",
@@ -3451,10 +3451,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 64,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConstructor",
@@ -3492,10 +3492,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 98.99999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "namespace",
@@ -3515,10 +3515,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 207,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testOptionalSettingPresent",
@@ -3550,10 +3550,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testShortTopicName",
@@ -3585,10 +3585,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 529,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testEmptyServiceUriString",
@@ -3722,10 +3722,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 237.00000000000006,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAutoFailoverPolicyData",
@@ -3739,10 +3739,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 15,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAutoFailoverPolicyType",
@@ -3756,10 +3756,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 19,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testInvalidTopicType",
@@ -3803,10 +3803,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 64,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testBacklogQuotaIdentity",
@@ -3820,10 +3820,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 12,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "simple",
@@ -3837,10 +3837,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 9,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConsumerStats",
@@ -3854,10 +3854,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 8,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testDecodeFailed",
@@ -3877,10 +3877,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 948,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testLocalPolices",
@@ -3894,10 +3894,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testNamespaceIsolationData",
@@ -3911,10 +3911,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 76,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSerialization",
@@ -3928,10 +3928,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 45,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testGcsConfiguration",
@@ -3975,10 +3975,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 216,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPartitionedTopicStats",
@@ -3992,10 +3992,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 12,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPersistencePolicies",
@@ -4009,10 +4009,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 19,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPersistentOfflineTopicStats",
@@ -4026,10 +4026,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 29,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPersistentTopicStatsAggregation",
@@ -4049,10 +4049,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 51,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "propertyAdmin",
@@ -4084,10 +4084,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1386,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPublisherStats",
@@ -4107,10 +4107,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 37,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testReplicatorStatsAdd",
@@ -4130,10 +4130,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testResourceQuotaDefault",
@@ -4153,10 +4153,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 45,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRetentionPolices",
@@ -4170,10 +4170,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 8,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAutoFailoverPolicyFactory",
@@ -4187,10 +4187,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 22,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMinAvailablePolicty",
@@ -4204,10 +4204,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testBrokerAssignment",
@@ -4257,10 +4257,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 265,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testFindBrokers",
@@ -4310,10 +4310,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 309.00000000000006,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testEncoder",
@@ -4333,10 +4333,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 4999.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSkipBrokerEntryMetadata",
@@ -4386,10 +4386,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2812.0000000000005,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSnapshot",
@@ -4433,10 +4433,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 3232.9999999999995,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testChannelRead",
@@ -4450,10 +4450,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 3690,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testInvokeJVMInternals",
@@ -4467,10 +4467,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 82,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMap",
@@ -4484,10 +4484,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 242,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testFileNotModified",
@@ -4531,10 +4531,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 6161.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMultipleTryAcquire",
@@ -4608,10 +4608,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7199.999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testCreateInstanceNoNoArgConstructor",
@@ -4691,10 +4691,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 172,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testParseRelativeTime",
@@ -4708,10 +4708,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 39,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testResetWords",
@@ -4731,10 +4731,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 13.000000000000002,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRecycle",
@@ -4754,10 +4754,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 63,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRehashingWithDeletes",
@@ -4843,10 +4843,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 28351.000000000004,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "concurrentInsertionsAndReads",
@@ -4944,10 +4944,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1519.0000000000002,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRemove",
@@ -5027,10 +5027,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 9241,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "concurrentInsertions",
@@ -5104,10 +5104,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7114.999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAddForDifferentKey",
@@ -5193,10 +5193,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1355.9999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "concurrentInsertions",
@@ -5258,10 +5258,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 342,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testUpdateObject",
@@ -5281,10 +5281,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 64,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "removeTest",
@@ -5328,10 +5328,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 350,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testItems",
@@ -5429,10 +5429,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 3092.9999999999995,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testQueue",
@@ -5458,10 +5458,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 238,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "toCompletableFuture_shouldCompleteExceptionally_channelFutureCompletedAfter",
@@ -5499,10 +5499,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1549.0000000000002,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRedirectUrlWithServerStarted",
@@ -5516,10 +5516,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 4904,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthorizationWithAnonymousUser",
@@ -5539,10 +5539,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 27527,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPulsarFunctionState",
@@ -5568,10 +5568,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 42238,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testFunctionsCreation",
@@ -5585,10 +5585,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 12012,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthorization",
@@ -5602,10 +5602,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 30213,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "replayableProxyContentProviderTest",
@@ -5619,10 +5619,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 474,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthenticatedProxyAsNonAdmin",
@@ -5636,10 +5636,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2159,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testFunctionWorkerRedirect",
@@ -5653,10 +5653,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 10,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test",
@@ -5670,10 +5670,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 125,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testTlsSyncProducerAndConsumer",
@@ -5687,10 +5687,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2279,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthentication",
@@ -5704,10 +5704,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 16696,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testInboundConnection",
@@ -5721,10 +5721,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1792,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testSimpleProduceAndConsume",
@@ -5738,10 +5738,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 511,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testForwardAuthData",
@@ -5755,10 +5755,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 31924,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testProxyToEndsInSlash",
@@ -5826,10 +5826,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1660,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testProducerFailed",
@@ -5855,10 +5855,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 6701.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPartitions",
@@ -5884,10 +5884,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7220.000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testLookup",
@@ -5901,10 +5901,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2568,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRegexSubscription",
@@ -5942,10 +5942,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1345.9999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testIncorrectRoles",
@@ -5959,10 +5959,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 10403,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testChangeLogLevel",
@@ -5988,10 +5988,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 533,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testPartitions",
@@ -6035,10 +6035,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2946.9999999999995,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testProducer",
@@ -6058,10 +6058,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 414.00000000000006,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testServiceStartup",
@@ -6075,10 +6075,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 4,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testProxyAuthorization",
@@ -6092,10 +6092,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2128,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tlsCiphersAndProtocols",
@@ -6181,10 +6181,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 32897,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testDiscoveryService",
@@ -6198,10 +6198,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2045,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testAuthWithRandoCert",
@@ -6227,10 +6227,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 8235,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testUnauthenticatedProxy",
@@ -6250,10 +6250,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 113.99999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test",
@@ -6267,10 +6267,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 29041,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testIsUsingAvroSchemaParser",
@@ -6296,10 +6296,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 30859,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testConsumerCompatibilityCheckCanReadLastTest",
@@ -6445,10 +6445,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 107437.00000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testServiceException",
@@ -6468,10 +6468,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 40751.99999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouldFailIfEnumParameterIsMissing",
@@ -6515,10 +6515,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 23,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouldNotDoAnythingWhenThereIsBeforeAndAfterMethod",
@@ -6556,10 +6556,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 36.00000000000001,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouldCallSetupBeforeRetrying",
@@ -6597,10 +6597,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 27,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouldCallSetupOnce1",
@@ -6626,10 +6626,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testHelp",
@@ -6649,10 +6649,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 17,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testBooleanFormat",
@@ -6684,10 +6684,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 49.99999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testLists",
@@ -6731,10 +6731,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 59,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "unauthenticatedSocketTest",
@@ -6766,10 +6766,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 28904.000000000004,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test",
@@ -6783,10 +6783,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 1263,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "configTest",
@@ -6806,10 +6806,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 8943,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "socketTest",
@@ -6823,10 +6823,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 10821,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "socketTest",
@@ -6840,10 +6840,10 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 7280,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "anonymousSocketTest",
diff --git a/__tests__/__snapshots__/jest-junit.test.ts.snap b/__tests__/__snapshots__/jest-junit.test.ts.snap
index 3a04722..f29b5c8 100644
--- a/__tests__/__snapshots__/jest-junit.test.ts.snap
+++ b/__tests__/__snapshots__/jest-junit.test.ts.snap
@@ -1,14 +1,40 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
+exports[`jest-junit tests report from #235 testing react components named 1`] = `
+TestRunResult {
+ "path": "fixtures/external/jest/jest-react-component-test-results.xml",
+ "suites": [
+ TestSuiteResult {
+ "groups": [
+ TestGroupResult {
+ "name": "",
+ "tests": [
+ TestCaseResult {
+ "error": undefined,
+ "name": " should render properly",
+ "result": "success",
+ "time": 704,
+ },
+ ],
+ },
+ ],
+ "name": "\\",
+ "totalTime": 798,
+ },
+ ],
+ "totalTime": 1000,
+}
+`;
+
exports[`jest-junit tests report from ./reports/jest test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/jest-junit.xml",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Test 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Passing test",
@@ -19,18 +45,18 @@ TestRunResult {
},
TestGroupResult {
"name": "Test 1 › Test 1.1",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: expect(received).toBeTruthy()
Received: false
- at Object.<anonymous> (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\__tests__\\\\main.test.js:10:21)
- at Object.asyncJestTest (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmineAsyncInstall.js:106:37)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:45:12
+ at Object.<anonymous> (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\__tests__\\main.test.js:10:21)
+ at Object.asyncJestTest (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmineAsyncInstall.js:106:37)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:45:12
at new Promise (<anonymous>)
- at mapper (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:28:19)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:75:41
+ at mapper (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:28:19)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:75:41
at processTicksAndRejections (internal/process/task_queues.js:97:5)",
"line": 10,
"path": "__tests__/main.test.js",
@@ -40,15 +66,15 @@ Received: false
"time": 2,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: Some error
- at Object.throwError (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\lib\\\\main.js:2:9)
- at Object.<anonymous> (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\__tests__\\\\main.test.js:14:11)
- at Object.asyncJestTest (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmineAsyncInstall.js:106:37)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:45:12
+ at Object.throwError (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\lib\\main.js:2:9)
+ at Object.<anonymous> (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\__tests__\\main.test.js:14:11)
+ at Object.asyncJestTest (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmineAsyncInstall.js:106:37)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:45:12
at new Promise (<anonymous>)
- at mapper (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:28:19)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:75:41
+ at mapper (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:28:19)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:75:41
at processTicksAndRejections (internal/process/task_queues.js:97:5)",
"line": 2,
"path": "lib/main.js",
@@ -61,16 +87,16 @@ Received: false
},
TestGroupResult {
"name": "Test 2",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: Some error
- at Object.<anonymous> (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\__tests__\\\\main.test.js:21:11)
- at Object.asyncJestTest (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmineAsyncInstall.js:106:37)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:45:12
+ at Object.<anonymous> (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\__tests__\\main.test.js:21:11)
+ at Object.asyncJestTest (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmineAsyncInstall.js:106:37)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:45:12
at new Promise (<anonymous>)
- at mapper (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:28:19)
- at C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\queueRunner.js:75:41
+ at mapper (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:28:19)
+ at C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\queueRunner.js:75:41
at processTicksAndRejections (internal/process/task_queues.js:97:5)",
"line": 21,
"path": "__tests__/main.test.js",
@@ -82,30 +108,30 @@ Received: false
],
},
],
- "name": "__tests__\\\\main.test.js",
+ "name": "__tests__\\main.test.js",
"totalTime": 486,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": ": Timeout - Async callback was not invoked within the 1 ms timeout specified by jest.setTimeout.Timeout - Async callback was not invoked within the 1 ms timeout specified by jest.setTimeout.Error:
- at new Spec (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmine\\\\Spec.js:116:22)
- at new Spec (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\setup_jest_globals.js:78:9)
- at specFactory (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmine\\\\Env.js:523:24)
- at Env.it (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmine\\\\Env.js:592:24)
- at Env.it (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmineAsyncInstall.js:134:23)
- at it (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\jasmine\\\\jasmineLight.js:100:21)
- at Object.<anonymous> (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\__tests__\\\\second.test.js:1:34)
- at Runtime._execModule (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-runtime\\\\build\\\\index.js:1245:24)
- at Runtime._loadModule (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-runtime\\\\build\\\\index.js:844:12)
- at Runtime.requireModule (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-runtime\\\\build\\\\index.js:694:10)
- at jasmine2 (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-jasmine2\\\\build\\\\index.js:230:13)
- at runTestInternal (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-runner\\\\build\\\\runTest.js:380:22)
- at runTest (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-check\\\\reports\\\\jest\\\\node_modules\\\\jest-runner\\\\build\\\\runTest.js:472:34)",
+ at new Spec (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmine\\Spec.js:116:22)
+ at new Spec (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\setup_jest_globals.js:78:9)
+ at specFactory (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmine\\Env.js:523:24)
+ at Env.it (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmine\\Env.js:592:24)
+ at Env.it (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmineAsyncInstall.js:134:23)
+ at it (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\jasmine\\jasmineLight.js:100:21)
+ at Object.<anonymous> (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\__tests__\\second.test.js:1:34)
+ at Runtime._execModule (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-runtime\\build\\index.js:1245:24)
+ at Runtime._loadModule (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-runtime\\build\\index.js:844:12)
+ at Runtime.requireModule (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-runtime\\build\\index.js:694:10)
+ at jasmine2 (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-jasmine2\\build\\index.js:230:13)
+ at runTestInternal (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-runner\\build\\runTest.js:380:22)
+ at runTest (C:\\Users\\Michal\\Workspace\\dorny\\test-check\\reports\\jest\\node_modules\\jest-runner\\build\\runTest.js:472:34)",
"line": 1,
"path": "__tests__/second.test.js",
},
@@ -122,7 +148,7 @@ Received: false
],
},
],
- "name": "__tests__\\\\second.test.js",
+ "name": "__tests__\\second.test.js",
"totalTime": 82,
},
],
@@ -133,12 +159,12 @@ Received: false
exports[`jest-junit tests report from facebook/jest test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/jest/jest-test-results.xml",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "picks a name based on the rootDir",
@@ -173,7 +199,7 @@ TestRunResult {
},
TestGroupResult {
"name": "rootDir",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws if the options is missing a rootDir property",
@@ -184,7 +210,7 @@ TestRunResult {
},
TestGroupResult {
"name": "automock",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "falsy automock is not overwritten",
@@ -195,7 +221,7 @@ TestRunResult {
},
TestGroupResult {
"name": "collectCoverageOnlyFrom",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normalizes all paths relative to rootDir",
@@ -218,7 +244,7 @@ TestRunResult {
},
TestGroupResult {
"name": "collectCoverageFrom",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "substitutes tokens",
@@ -229,7 +255,7 @@ TestRunResult {
},
TestGroupResult {
"name": "findRelatedTests",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "it generates --coverageCoverageFrom patterns when needed",
@@ -240,7 +266,7 @@ TestRunResult {
},
TestGroupResult {
"name": "roots",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normalizes all paths relative to rootDir",
@@ -263,7 +289,7 @@ TestRunResult {
},
TestGroupResult {
"name": "transform",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normalizes the path",
@@ -280,7 +306,7 @@ TestRunResult {
},
TestGroupResult {
"name": "haste",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normalizes the path for hasteImplModulePath",
@@ -291,7 +317,7 @@ TestRunResult {
},
TestGroupResult {
"name": "setupFilesAfterEnv",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normalizes the path according to rootDir",
@@ -314,7 +340,7 @@ TestRunResult {
},
TestGroupResult {
"name": "setupTestFrameworkScriptFile",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "logs a deprecation warning when \`setupTestFrameworkScriptFile\` is used",
@@ -331,7 +357,7 @@ TestRunResult {
},
TestGroupResult {
"name": "coveragePathIgnorePatterns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not normalize paths relative to rootDir",
@@ -354,7 +380,7 @@ TestRunResult {
},
TestGroupResult {
"name": "watchPathIgnorePatterns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not normalize paths relative to rootDir",
@@ -377,7 +403,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathIgnorePatterns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not normalize paths relative to rootDir",
@@ -400,7 +426,7 @@ TestRunResult {
},
TestGroupResult {
"name": "modulePathIgnorePatterns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not normalize paths relative to rootDir",
@@ -423,7 +449,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testRunner",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to Circus",
@@ -446,7 +472,7 @@ TestRunResult {
},
TestGroupResult {
"name": "coverageDirectory",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to /coverage",
@@ -457,7 +483,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testEnvironment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolves to an environment and prefers jest-environment-\`name\`",
@@ -480,7 +506,7 @@ TestRunResult {
},
TestGroupResult {
"name": "babel-jest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "correctly identifies and uses babel-jest",
@@ -497,7 +523,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Upgrade help",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "logs a warning when \`scriptPreprocessor\` and/or \`preprocessorIgnorePatterns\` are used",
@@ -508,7 +534,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testRegex",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testRegex empty string is mapped to empty array",
@@ -531,7 +557,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testMatch",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testMatch default not applied if testRegex is set",
@@ -560,7 +586,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleDirectories",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to node_modules",
@@ -577,7 +603,7 @@ TestRunResult {
},
TestGroupResult {
"name": "preset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws when preset not found",
@@ -586,7 +612,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "throws when module was found but no \\"jest-preset.js\\" or \\"jest-preset.json\\" files",
+ "name": "throws when module was found but no "jest-preset.js" or "jest-preset.json" files",
"result": "success",
"time": 1,
},
@@ -610,7 +636,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "works with \\"react-native\\"",
+ "name": "works with "react-native"",
"result": "success",
"time": 3,
},
@@ -648,7 +674,7 @@ TestRunResult {
},
TestGroupResult {
"name": "preset with globals",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should merge the globals preset correctly",
@@ -659,7 +685,7 @@ TestRunResult {
},
TestGroupResult {
"name": "preset without setupFiles",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should normalize setupFiles correctly",
@@ -670,7 +696,7 @@ TestRunResult {
},
TestGroupResult {
"name": "preset without setupFilesAfterEnv",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should normalize setupFilesAfterEnv correctly",
@@ -681,7 +707,7 @@ TestRunResult {
},
TestGroupResult {
"name": "runner",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to \`jest-runner\`",
@@ -710,7 +736,7 @@ TestRunResult {
},
TestGroupResult {
"name": "watchPlugins",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to undefined",
@@ -745,7 +771,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to empty",
@@ -768,7 +794,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern --testPathPattern",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses --testPathPattern if set",
@@ -791,7 +817,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern --testPathPattern posix",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not escape the pattern",
@@ -802,10 +828,10 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern --testPathPattern win32",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "preserves any use of \\"\\\\\\"",
+ "name": "preserves any use of "\\"",
"result": "success",
"time": 1,
},
@@ -831,7 +857,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern ",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses if set",
@@ -854,7 +880,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern posix",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not escape the pattern",
@@ -865,10 +891,10 @@ TestRunResult {
},
TestGroupResult {
"name": "testPathPattern win32",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "preserves any use of \\"\\\\\\"",
+ "name": "preserves any use of "\\"",
"result": "success",
"time": 15,
},
@@ -894,7 +920,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleFileExtensions",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to something useful",
@@ -917,7 +943,7 @@ TestRunResult {
},
TestGroupResult {
"name": "cwd",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is set to process.cwd",
@@ -934,7 +960,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Defaults",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be accepted by normalize",
@@ -945,7 +971,7 @@ TestRunResult {
},
TestGroupResult {
"name": "displayName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw an error when displayName is is an empty object",
@@ -1004,7 +1030,7 @@ TestRunResult {
},
TestGroupResult {
"name": "testTimeout",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return timeout value if defined",
@@ -1021,7 +1047,7 @@ TestRunResult {
},
TestGroupResult {
"name": "extensionsToTreatAsEsm",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass valid config through",
@@ -1059,10 +1085,10 @@ TestRunResult {
"totalTime": 798,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Repl cli",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs without errors",
@@ -1076,10 +1102,10 @@ TestRunResult {
"totalTime": 1172,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "chalk",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "level 0",
@@ -1108,7 +1134,7 @@ TestRunResult {
},
TestGroupResult {
"name": "matcher error toMatchInlineSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Expected properties must be an object (non-null) without snapshot",
@@ -1137,7 +1163,7 @@ TestRunResult {
},
TestGroupResult {
"name": "matcher error toMatchSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Expected properties must be an object (non-null)",
@@ -1166,7 +1192,7 @@ TestRunResult {
},
TestGroupResult {
"name": "matcher error toMatchSnapshot received value must be an object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(non-null)",
@@ -1183,7 +1209,7 @@ TestRunResult {
},
TestGroupResult {
"name": "matcher error toThrowErrorMatchingInlineSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Inline snapshot must be a string",
@@ -1200,7 +1226,7 @@ TestRunResult {
},
TestGroupResult {
"name": "matcher error toThrowErrorMatchingSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Received value must be a function",
@@ -1217,7 +1243,7 @@ TestRunResult {
},
TestGroupResult {
"name": "other error toThrowErrorMatchingSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Received function did not throw",
@@ -1228,7 +1254,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toMatchInlineSnapshot with properties equals false",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "with snapshot",
@@ -1245,7 +1271,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toMatchInlineSnapshot with properties",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "equals true",
@@ -1256,7 +1282,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toMatchSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "New snapshot was not written (multi line)",
@@ -1273,7 +1299,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toMatchSnapshot with properties equals false",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isLineDiffable false",
@@ -1290,7 +1316,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toMatchSnapshot with properties",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "equals true",
@@ -1301,7 +1327,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass false toThrowErrorMatchingInlineSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "with snapshot",
@@ -1312,7 +1338,7 @@ TestRunResult {
},
TestGroupResult {
"name": "pass true toMatchSnapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "without properties",
@@ -1323,7 +1349,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printPropertiesAndReceived",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "omit missing properties",
@@ -1334,7 +1360,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived backtick",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "single line expected and received",
@@ -1345,7 +1371,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived empty string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "expected and received single line",
@@ -1362,7 +1388,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived escape",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "double quote marks in string",
@@ -1391,7 +1417,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived expand",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "false",
@@ -1408,7 +1434,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fallback to line diff",
@@ -1437,7 +1463,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived has no common after clean up chaff",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "array",
@@ -1454,7 +1480,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived MAX_DIFF_STRING_LENGTH unquoted",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "both are less",
@@ -1477,7 +1503,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived MAX_DIFF_STRING_LENGTH quoted",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "both are less",
@@ -1500,7 +1526,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived isLineDiffable false",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "asymmetric matcher",
@@ -1541,7 +1567,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived isLineDiffable true",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "array",
@@ -1570,7 +1596,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived ignore indentation",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "markup delete",
@@ -1593,7 +1619,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived ignore indentation object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "delete",
@@ -1610,7 +1636,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printSnapshotAndReceived without serialize",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "backtick single line expected and received",
@@ -1648,10 +1674,10 @@ TestRunResult {
"totalTime": 1188,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "get configuration defaults",
@@ -1665,10 +1691,10 @@ TestRunResult {
"totalTime": 672,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "isCoreModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns false if \`hasCoreModules\` is false.",
@@ -1697,7 +1723,7 @@ TestRunResult {
},
TestGroupResult {
"name": "findNodeModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is possible to override the default resolver",
@@ -1714,7 +1740,7 @@ TestRunResult {
},
TestGroupResult {
"name": "resolveModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is possible to resolve node modules",
@@ -1755,7 +1781,7 @@ TestRunResult {
},
TestGroupResult {
"name": "getMockModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is possible to use custom resolver to resolve deps inside mock modules with moduleNameMapper",
@@ -1766,7 +1792,7 @@ TestRunResult {
},
TestGroupResult {
"name": "nodeModulesPaths",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "provides custom module paths after node_modules",
@@ -1777,16 +1803,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Resolver.getModulePaths() -> nodeModulesPaths()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "can resolve node modules relative to absolute paths in \\"moduleDirectories\\" on Windows platforms",
+ "name": "can resolve node modules relative to absolute paths in "moduleDirectories" on Windows platforms",
"result": "success",
"time": 21,
},
TestCaseResult {
"error": undefined,
- "name": "can resolve node modules relative to absolute paths in \\"moduleDirectories\\" on Posix platforms",
+ "name": "can resolve node modules relative to absolute paths in "moduleDirectories" on Posix platforms",
"result": "success",
"time": 8,
},
@@ -1797,10 +1823,10 @@ TestRunResult {
"totalTime": 1308,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toEqual",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be reflexive",
@@ -1820,10 +1846,10 @@ TestRunResult {
"totalTime": 1062,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "snapshots needs update with npm test",
@@ -1855,10 +1881,10 @@ TestRunResult {
"totalTime": 366,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "readConfigs() throws when called without project paths",
@@ -1884,10 +1910,10 @@ TestRunResult {
"totalTime": 135,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "generateEmptyCoverage",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "generates an empty coverage object for a file without running it",
@@ -1913,10 +1939,10 @@ TestRunResult {
"totalTime": 1129,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Any.asymmetricMatch()",
@@ -2152,10 +2178,10 @@ TestRunResult {
"totalTime": 207,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "onRunComplete",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "getLastError() returns an error when threshold is not met for global",
@@ -2240,10 +2266,10 @@ TestRunResult {
"totalTime": 397,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "saveInlineSnapshots() replaces empty function call with a template literal",
@@ -2383,10 +2409,10 @@ TestRunResult {
"totalTime": 1149,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "readConfig() throws when an object is passed without a file path",
@@ -2400,10 +2426,10 @@ TestRunResult {
"totalTime": 76,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "When offset is -1",
@@ -2441,10 +2467,10 @@ TestRunResult {
"totalTime": 57,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toThrowError",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "to throw or not to throw",
@@ -2467,7 +2493,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError substring",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2514,7 +2540,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError regexp",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2555,7 +2581,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError error class",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2602,7 +2628,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError error-message pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2619,7 +2645,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError error-message fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2642,7 +2668,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric any-Class pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2659,7 +2685,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric any-Class fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2676,7 +2702,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric anything pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2693,7 +2719,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric anything fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2710,7 +2736,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric no-symbol pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2727,7 +2753,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric no-symbol fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2744,7 +2770,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric objectContaining pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2761,7 +2787,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError asymmetric objectContaining fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2778,7 +2804,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError promise/async throws if Error-like object is returned",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2807,7 +2833,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrowError expected is undefined",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "threw, but should not have (non-error falsey)",
@@ -2818,7 +2844,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "to throw or not to throw",
@@ -2841,7 +2867,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow substring",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2888,7 +2914,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow regexp",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2929,7 +2955,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow error class",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -2976,7 +3002,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow error-message pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -2993,7 +3019,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow error-message fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3016,7 +3042,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric any-Class pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3033,7 +3059,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric any-Class fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3050,7 +3076,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric anything pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3067,7 +3093,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric anything fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3084,7 +3110,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric no-symbol pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3101,7 +3127,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric no-symbol fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3118,7 +3144,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric objectContaining pass",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3135,7 +3161,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow asymmetric objectContaining fail",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -3152,7 +3178,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow promise/async throws if Error-like object is returned",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes",
@@ -3181,7 +3207,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toThrow expected is undefined",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "threw, but should not have (non-error falsey)",
@@ -3195,10 +3221,10 @@ TestRunResult {
"totalTime": 257,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "recursively validates default Jest config",
@@ -3333,7 +3359,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "Comments in config JSON using \\"//\\" key are not warned",
+ "name": "Comments in config JSON using "//" key are not warned",
"result": "success",
"time": 0,
},
@@ -3344,10 +3370,10 @@ TestRunResult {
"totalTime": 283,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "defaults",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns cached object if called multiple times",
@@ -3370,7 +3396,7 @@ TestRunResult {
},
TestGroupResult {
"name": "custom resolver in project config",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns cached object if called multiple times",
@@ -3393,7 +3419,7 @@ TestRunResult {
},
TestGroupResult {
"name": "malformed custom resolver in project config",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "missing resolveSnapshotPath throws",
@@ -3425,10 +3451,10 @@ TestRunResult {
"totalTime": 98,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolves to the result of generateEmptyCoverage upon success",
@@ -3448,10 +3474,10 @@ TestRunResult {
"totalTime": 199,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Returns source string with inline maps when no transformOptions is passed",
@@ -3468,28 +3494,28 @@ TestRunResult {
},
TestGroupResult {
"name": "caller option correctly merges from defaults and options",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "{\\"supportsDynamicImport\\":true,\\"supportsStaticESM\\":true} -> {\\"supportsDynamicImport\\":true,\\"supportsStaticESM\\":true}",
+ "name": "{"supportsDynamicImport":true,"supportsStaticESM":true} -> {"supportsDynamicImport":true,"supportsStaticESM":true}",
"result": "success",
"time": 6,
},
TestCaseResult {
"error": undefined,
- "name": "{\\"supportsDynamicImport\\":false,\\"supportsStaticESM\\":false} -> {\\"supportsDynamicImport\\":false,\\"supportsStaticESM\\":false}",
+ "name": "{"supportsDynamicImport":false,"supportsStaticESM":false} -> {"supportsDynamicImport":false,"supportsStaticESM":false}",
"result": "success",
"time": 11,
},
TestCaseResult {
"error": undefined,
- "name": "{\\"supportsStaticESM\\":false} -> {\\"supportsDynamicImport\\":false,\\"supportsStaticESM\\":false}",
+ "name": "{"supportsStaticESM":false} -> {"supportsDynamicImport":false,"supportsStaticESM":false}",
"result": "success",
"time": 13,
},
TestCaseResult {
"error": undefined,
- "name": "{\\"supportsDynamicImport\\":true} -> {\\"supportsDynamicImport\\":true,\\"supportsStaticESM\\":false}",
+ "name": "{"supportsDynamicImport":true} -> {"supportsDynamicImport":true,"supportsStaticESM":false}",
"result": "success",
"time": 11,
},
@@ -3500,19 +3526,19 @@ TestRunResult {
"totalTime": 371,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Resolve config path .js",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "file path with \\".js\\"",
+ "name": "file path with ".js"",
"result": "success",
"time": 9,
},
TestCaseResult {
"error": undefined,
- "name": "directory path with \\".js\\"",
+ "name": "directory path with ".js"",
"result": "success",
"time": 11,
},
@@ -3520,16 +3546,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Resolve config path .ts",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "file path with \\".ts\\"",
+ "name": "file path with ".ts"",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "directory path with \\".ts\\"",
+ "name": "directory path with ".ts"",
"result": "success",
"time": 3,
},
@@ -3537,16 +3563,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Resolve config path .mjs",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "file path with \\".mjs\\"",
+ "name": "file path with ".mjs"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "directory path with \\".mjs\\"",
+ "name": "directory path with ".mjs"",
"result": "success",
"time": 7,
},
@@ -3554,16 +3580,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Resolve config path .cjs",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "file path with \\".cjs\\"",
+ "name": "file path with ".cjs"",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "directory path with \\".cjs\\"",
+ "name": "directory path with ".cjs"",
"result": "success",
"time": 2,
},
@@ -3571,16 +3597,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Resolve config path .json",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "file path with \\".json\\"",
+ "name": "file path with ".json"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "directory path with \\".json\\"",
+ "name": "directory path with ".json"",
"result": "success",
"time": 3,
},
@@ -3591,10 +3617,10 @@ TestRunResult {
"totalTime": 183,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "groupTestsBySuites",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should handle empty results",
@@ -3668,10 +3694,10 @@ TestRunResult {
"totalTime": 425,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "keyToTestName()",
@@ -3740,7 +3766,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "serialize handles \\\\r\\\\n",
+ "name": "serialize handles \\r\\n",
"result": "success",
"time": 1,
},
@@ -3748,7 +3774,7 @@ TestRunResult {
},
TestGroupResult {
"name": "ExtraLineBreaks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "0 empty string",
@@ -3795,7 +3821,7 @@ TestRunResult {
},
TestGroupResult {
"name": "removeLinesBeforeExternalMatcherTrap",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "contains external matcher trap",
@@ -3812,7 +3838,7 @@ TestRunResult {
},
TestGroupResult {
"name": "DeepMerge with property matchers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Correctly merges a nested object",
@@ -3850,10 +3876,10 @@ TestRunResult {
"totalTime": 214,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test always",
@@ -3948,7 +3974,7 @@ TestRunResult {
},
TestGroupResult {
"name": "node-notifier is an optional dependency",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "without node-notifier uses mock function that throws an error",
@@ -3974,10 +4000,10 @@ TestRunResult {
"totalTime": 166,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime CLI",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails with no path",
@@ -4009,10 +4035,10 @@ TestRunResult {
"totalTime": 4094.0000000000005,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is available globally when matcher is unary",
@@ -4080,10 +4106,10 @@ TestRunResult {
"totalTime": 99,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getCallsite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "without source map",
@@ -4109,10 +4135,10 @@ TestRunResult {
"totalTime": 86,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "maps special values to valid options",
@@ -4144,10 +4170,10 @@ TestRunResult {
"totalTime": 53,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls handler on change value",
@@ -4173,10 +4199,10 @@ TestRunResult {
"totalTime": 91,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "docblock",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "extracts valid docblock with line comment",
@@ -4400,10 +4426,10 @@ TestRunResult {
"totalTime": 177,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "dedentLines non-null",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "no lines",
@@ -4456,7 +4482,7 @@ TestRunResult {
},
TestGroupResult {
"name": "dedentLines null",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "object key multi-line",
@@ -4518,10 +4544,10 @@ TestRunResult {
"totalTime": 94,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getMaxWorkers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Returns 1 when runInBand",
@@ -4550,7 +4576,7 @@ TestRunResult {
},
TestGroupResult {
"name": "getMaxWorkers % based",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "50% = 2 workers",
@@ -4576,10 +4602,10 @@ TestRunResult {
"totalTime": 67,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "wrapAnsiString()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "wraps a long string containing ansi chars",
@@ -4596,7 +4622,7 @@ TestRunResult {
},
TestGroupResult {
"name": "trimAndFormatPath()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "trims dirname",
@@ -4631,7 +4657,7 @@ TestRunResult {
},
TestGroupResult {
"name": "printDisplayName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should default displayName color to white when displayName is a string",
@@ -4657,10 +4683,10 @@ TestRunResult {
"totalTime": 85,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throw matcher can take func",
@@ -4671,7 +4697,7 @@ TestRunResult {
},
TestGroupResult {
"name": "throw matcher from promise",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "can take error",
@@ -4691,10 +4717,10 @@ TestRunResult {
"totalTime": 481,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "matcher returns matcher name, expected and actual values",
@@ -4708,10 +4734,10 @@ TestRunResult {
"totalTime": 131,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "validate pattern function",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "without passed args returns true",
@@ -4743,10 +4769,10 @@ TestRunResult {
"totalTime": 52,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "isBuiltinModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true for the \`path\` module",
@@ -4778,10 +4804,10 @@ TestRunResult {
"totalTime": 36,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throw when directly imported",
@@ -4795,10 +4821,10 @@ TestRunResult {
"totalTime": 533,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "replacePathSepForRegex() posix",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the path",
@@ -4809,7 +4835,7 @@ TestRunResult {
},
TestGroupResult {
"name": "replacePathSepForRegex() win32",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should replace POSIX path separators",
@@ -4859,10 +4885,10 @@ TestRunResult {
"totalTime": 56,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mock with 0 calls and default name",
@@ -4930,10 +4956,10 @@ TestRunResult {
"totalTime": 45,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getWatermarks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "that watermarks use thresholds as upper target",
@@ -4953,10 +4979,10 @@ TestRunResult {
"totalTime": 37,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normal output, everything goes to stdout",
@@ -4976,10 +5002,10 @@ TestRunResult {
"totalTime": 148,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call \`terminal-link\` correctly",
@@ -5011,10 +5037,10 @@ TestRunResult {
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toEqual duck type Text",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5031,7 +5057,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toEqual duck type Element",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5048,7 +5074,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toEqual duck type Fragment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5065,7 +5091,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toEqual document createTextNode",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5082,7 +5108,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toEqual document createElement",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5099,7 +5125,7 @@ TestRunResult {
},
TestGroupResult {
"name": "toEqual document createDocumentFragment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "isNot false",
@@ -5119,10 +5145,10 @@ TestRunResult {
"totalTime": 99,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "NodeEnvironment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses a copy of the process object",
@@ -5166,10 +5192,10 @@ TestRunResult {
"totalTime": 184,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Retrieves the snapshot status",
@@ -5195,10 +5221,10 @@ TestRunResult {
"totalTime": 28,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "validates yargs special options",
@@ -5242,10 +5268,10 @@ TestRunResult {
"totalTime": 83,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "creates a snapshot summary",
@@ -5277,10 +5303,10 @@ TestRunResult {
"totalTime": 49,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "for multiline test name returns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test name with highlighted pattern and replaced line breaks",
@@ -5291,7 +5317,7 @@ TestRunResult {
},
TestGroupResult {
"name": "for one line test name with pattern in the head returns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test name with highlighted pattern",
@@ -5314,7 +5340,7 @@ TestRunResult {
},
TestGroupResult {
"name": "for one line test name pattern in the middle",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test name with highlighted pattern returns",
@@ -5343,7 +5369,7 @@ TestRunResult {
},
TestGroupResult {
"name": "for one line test name pattern in the tail returns",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test name with highlighted pattern",
@@ -5369,10 +5395,10 @@ TestRunResult {
"totalTime": 129,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "shouldInstrument should return true",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "when testRegex is provided and file is not a test file",
@@ -5443,7 +5469,7 @@ TestRunResult {
},
TestGroupResult {
"name": "shouldInstrument should return false",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "if collectCoverage is falsy",
@@ -5535,10 +5561,10 @@ TestRunResult {
"totalTime": 155,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ScriptTransformer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "transforms a file properly",
@@ -5678,10 +5704,10 @@ TestRunResult {
"totalTime": 1660,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "simple test",
@@ -5701,10 +5727,10 @@ TestRunResult {
"totalTime": 2902,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "moduleMocker getMetadata",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the function \`name\` property",
@@ -5769,7 +5795,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker generateFromMetadata",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "forwards the function name property",
@@ -5972,7 +5998,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker generateFromMetadata mocked functions",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tracks calls to mocks",
@@ -6043,7 +6069,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker generateFromMetadata return values",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tracks return values",
@@ -6066,7 +6092,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker generateFromMetadata invocationCallOrder",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tracks invocationCallOrder made by mocks",
@@ -6095,7 +6121,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker getMockImplementation",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should mock calls to a mock function",
@@ -6106,7 +6132,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker mockImplementationOnce",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should mock constructor",
@@ -6129,7 +6155,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mockReturnValue does not override mockImplementationOnce",
@@ -6182,7 +6208,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker spyOn",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work",
@@ -6211,7 +6237,7 @@ TestRunResult {
},
TestGroupResult {
"name": "moduleMocker spyOnProperty",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work - getter",
@@ -6261,10 +6287,10 @@ TestRunResult {
"totalTime": 509,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "beforeEach is executed before each test in current/child describe blocks",
@@ -6290,10 +6316,10 @@ TestRunResult {
"totalTime": 3762,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "finds haste modules",
@@ -6454,7 +6480,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runtime requireModule on node >=12.12.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "overrides module.createRequire",
@@ -6468,10 +6494,10 @@ TestRunResult {
"totalTime": 2439,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime jest.mock",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses explicitly set mocks instead of automocking",
@@ -6494,7 +6520,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runtime jest.setMock",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses explicitly set mocks instead of automocking",
@@ -6508,10 +6534,10 @@ TestRunResult {
"totalTime": 743,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "test/it error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "it doesn't throw an error with valid arguments",
@@ -6567,10 +6593,10 @@ TestRunResult {
"totalTime": 300,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "BaseWorkerPool",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when createWorker is not defined",
@@ -6623,7 +6649,7 @@ TestRunResult {
},
TestGroupResult {
"name": "BaseWorkerPool end",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ends all workers",
@@ -6649,10 +6675,10 @@ TestRunResult {
"totalTime": 653,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not incorrectly match identity-obj-proxy as Immutable object",
@@ -6663,7 +6689,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.OrderedSet",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -6740,7 +6766,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.List",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -6817,7 +6843,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Stack",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -6894,7 +6920,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Set",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -6971,7 +6997,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Map",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -7030,7 +7056,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.OrderedMap",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty collection {min: true}",
@@ -7095,7 +7121,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Record",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty record {min: true}",
@@ -7160,7 +7186,7 @@ TestRunResult {
},
TestGroupResult {
"name": "indentation of heterogeneous collections",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "empty Immutable.List as child of Object",
@@ -7189,7 +7215,7 @@ TestRunResult {
},
TestGroupResult {
"name": "indent option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default implicit: 2 spaces",
@@ -7218,7 +7244,7 @@ TestRunResult {
},
TestGroupResult {
"name": "maxDepth option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Immutable.List as child of Object",
@@ -7247,7 +7273,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Seq",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports an empty sequence from array {min: true}",
@@ -7336,7 +7362,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Seq lazy entries",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "from object properties",
@@ -7353,7 +7379,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Immutable.Seq lazy values",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "from Immutable.Range",
@@ -7391,10 +7417,10 @@ TestRunResult {
"totalTime": 443,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "installs source maps if available",
@@ -7408,10 +7434,10 @@ TestRunResult {
"totalTime": 584,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "JSDomEnvironment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure setTimeout/setInterval to use the browser api",
@@ -7431,10 +7457,10 @@ TestRunResult {
"totalTime": 783,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "babel-plugin-jest-hoist",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "automatic react runtime",
@@ -7466,10 +7492,10 @@ TestRunResult {
"totalTime": 347,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes fork options down to child_process.fork, adding the defaults",
@@ -7579,10 +7605,10 @@ TestRunResult {
"totalTime": 184,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "jest-each .test",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -7653,7 +7679,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -7724,7 +7750,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -7795,7 +7821,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -7890,7 +7916,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -7961,7 +7987,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8032,7 +8058,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .fit",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8103,7 +8129,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8174,7 +8200,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8245,7 +8271,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .fdescribe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8316,7 +8342,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with an array",
@@ -8387,7 +8413,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each done callback",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls [ 'test' ] with done when cb function has more args than params of given test row",
@@ -8452,7 +8478,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xtest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8481,7 +8507,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8510,7 +8536,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xit",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8539,7 +8565,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8568,7 +8594,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xdescribe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8597,7 +8623,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -8629,10 +8655,10 @@ TestRunResult {
"totalTime": 192,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "jest-each .test",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -8751,7 +8777,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -8888,7 +8914,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9007,7 +9033,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.concurrent.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9144,7 +9170,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9263,7 +9289,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9382,7 +9408,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .fit",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9501,7 +9527,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9620,7 +9646,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9739,7 +9765,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .fdescribe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9858,7 +9884,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe.only",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error when there are additional words in first column heading",
@@ -9977,7 +10003,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each done callback",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls [ 'test' ] with done when cb function has more args than params of given test row",
@@ -10036,7 +10062,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xtest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10059,7 +10085,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .test.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10082,7 +10108,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xit",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10105,7 +10131,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .it.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10128,7 +10154,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .xdescribe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10151,7 +10177,7 @@ TestRunResult {
},
TestGroupResult {
"name": "jest-each .describe.skip",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls global with given title",
@@ -10177,10 +10203,10 @@ TestRunResult {
"totalTime": 483,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports a single element with no props or children",
@@ -10393,7 +10419,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "throws if theme option is not of type \\"object\\"",
+ "name": "throws if theme option is not of type "object"",
"result": "success",
"time": 0,
},
@@ -10449,7 +10475,7 @@ TestRunResult {
},
TestGroupResult {
"name": "test object for subset match",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "undefined props",
@@ -10466,7 +10492,7 @@ TestRunResult {
},
TestGroupResult {
"name": "indent option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default implicit: 2 spaces",
@@ -10495,7 +10521,7 @@ TestRunResult {
},
TestGroupResult {
"name": "maxDepth option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "elements",
@@ -10512,7 +10538,7 @@ TestRunResult {
},
TestGroupResult {
"name": "React.memo without displayName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders the component name",
@@ -10523,7 +10549,7 @@ TestRunResult {
},
TestGroupResult {
"name": "React.memo with displayName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders the displayName of component before memoizing",
@@ -10543,10 +10569,10 @@ TestRunResult {
"totalTime": 325,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tests are not marked done until their parent afterAll runs",
@@ -10590,10 +10616,10 @@ TestRunResult {
"totalTime": 5755,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "prettyFormat()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints empty arguments",
@@ -11072,7 +11098,7 @@ TestRunResult {
},
TestGroupResult {
"name": "prettyFormat() indent option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default implicit: 2 spaces",
@@ -11101,7 +11127,7 @@ TestRunResult {
},
TestGroupResult {
"name": "prettyFormat() min",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints some basic values in min mode",
@@ -11127,10 +11153,10 @@ TestRunResult {
"totalTime": 219,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Farm",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "sends a request to one worker",
@@ -11198,10 +11224,10 @@ TestRunResult {
"totalTime": 158,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "lazily requires the file",
@@ -11228,13 +11254,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "calls the main module if the method call is \\"default\\"",
+ "name": "calls the main module if the method call is "default"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "calls the main export if the method call is \\"default\\" and it is a Babel transpiled one",
+ "name": "calls the main export if the method call is "default" and it is a Babel transpiled one",
"result": "success",
"time": 1,
},
@@ -11269,10 +11295,10 @@ TestRunResult {
"totalTime": 120,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "queueRunner",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs every function in the queue.",
@@ -11316,10 +11342,10 @@ TestRunResult {
"totalTime": 93,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passes fork options down to child_process.fork, adding the defaults",
@@ -11417,10 +11443,10 @@ TestRunResult {
"totalTime": 258,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "lazily requires the file",
@@ -11447,13 +11473,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "calls the main module if the method call is \\"default\\"",
+ "name": "calls the main module if the method call is "default"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "calls the main export if the method call is \\"default\\" and it is a Babel transpiled one",
+ "name": "calls the main export if the method call is "default" and it is a Babel transpiled one",
"result": "success",
"time": 1,
},
@@ -11488,10 +11514,10 @@ TestRunResult {
"totalTime": 135,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "injects the serializable module map into each worker in watch mode",
@@ -11511,13 +11537,13 @@ TestRunResult {
"totalTime": 905,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "beforeEach hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "beforeEach throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "beforeEach throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 19,
},
@@ -11567,10 +11593,10 @@ TestRunResult {
},
TestGroupResult {
"name": "beforeAll hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "beforeAll throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "beforeAll throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 0,
},
@@ -11620,10 +11646,10 @@ TestRunResult {
},
TestGroupResult {
"name": "afterEach hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "afterEach throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "afterEach throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 0,
},
@@ -11673,10 +11699,10 @@ TestRunResult {
},
TestGroupResult {
"name": "afterAll hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "afterAll throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "afterAll throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 1,
},
@@ -11729,10 +11755,10 @@ TestRunResult {
"totalTime": 127,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Using V8 implementation",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws the error with an invalid serialization",
@@ -11743,7 +11769,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11760,7 +11786,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11777,7 +11803,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 2",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11794,7 +11820,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 3",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11811,7 +11837,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 4",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11828,7 +11854,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 5",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11845,7 +11871,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 6",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11862,7 +11888,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Using V8 implementation Object 7",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes/deserializes in memory",
@@ -11882,10 +11908,10 @@ TestRunResult {
"totalTime": 158,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "CustomConsole assert",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "do not log when the assertion is truthy",
@@ -11914,7 +11940,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole count",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "count using the default counter",
@@ -11943,7 +11969,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole group",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "group without label",
@@ -11972,7 +11998,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole time",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the time between time() and timeEnd() on default timer",
@@ -11989,7 +12015,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole dir",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should print the deepest value",
@@ -12000,7 +12026,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole timeLog",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the time between time() and timeEnd() on default timer",
@@ -12029,7 +12055,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole console",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be able to initialize console instance",
@@ -12043,10 +12069,10 @@ TestRunResult {
"totalTime": 171,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "CustomConsole log",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should print to stdout",
@@ -12057,7 +12083,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should print to stderr",
@@ -12068,7 +12094,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole warn",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should print to stderr",
@@ -12079,7 +12105,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole assert",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "do not log when the assertion is truthy",
@@ -12108,7 +12134,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole count",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "count using the default counter",
@@ -12137,7 +12163,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole group",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "group without label",
@@ -12166,7 +12192,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole time",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the time between time() and timeEnd() on default timer",
@@ -12183,7 +12209,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole dir",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should print the deepest value",
@@ -12194,7 +12220,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole timeLog",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the time between time() and timeEnd() on default timer",
@@ -12223,7 +12249,7 @@ TestRunResult {
},
TestGroupResult {
"name": "CustomConsole console",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be able to initialize console instance",
@@ -12237,10 +12263,10 @@ TestRunResult {
"totalTime": 115,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "DOMCollection plugin for object properties",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports DOMStringMap",
@@ -12263,7 +12289,7 @@ TestRunResult {
},
TestGroupResult {
"name": "DOMCollection plugin for list items",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports HTMLCollection for getElementsByTagName",
@@ -12313,10 +12339,10 @@ TestRunResult {
"totalTime": 64,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "test/it.todo error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "todo throws error when given no arguments",
@@ -12342,10 +12368,10 @@ TestRunResult {
"totalTime": 81,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "sorts by file size if there is no timing information",
@@ -12401,10 +12427,10 @@ TestRunResult {
"totalTime": 251,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "doesn't throw on addExpectationResult when there are no children",
@@ -12418,10 +12444,10 @@ TestRunResult {
"totalTime": 84,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports any(String)",
@@ -12618,7 +12644,7 @@ TestRunResult {
},
TestGroupResult {
"name": "indent option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "default implicit: 2 spaces",
@@ -12647,7 +12673,7 @@ TestRunResult {
},
TestGroupResult {
"name": "maxDepth option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "matchers as leaf nodes",
@@ -12667,10 +12693,10 @@ TestRunResult {
"totalTime": 137,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ConvertAnsi plugin",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports style.red",
@@ -12714,10 +12740,10 @@ TestRunResult {
"totalTime": 43,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getConsoleOutput",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "takes noStackTrace and pass it on for assert",
@@ -12797,10 +12823,10 @@ TestRunResult {
"totalTime": 56,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "expectationResultFactory",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the result if passed.",
@@ -12850,10 +12876,10 @@ TestRunResult {
"totalTime": 70,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "array .add",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the result of adding 0 to 0",
@@ -12876,7 +12902,7 @@ TestRunResult {
},
TestGroupResult {
"name": "concurrent .add",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the result of adding 0 to 0",
@@ -12899,7 +12925,7 @@ TestRunResult {
},
TestGroupResult {
"name": "template .add",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns 0 when given 0 and 0",
@@ -12922,7 +12948,7 @@ TestRunResult {
},
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when not called with the right number of arguments",
@@ -12936,10 +12962,10 @@ TestRunResult {
"totalTime": 44,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "pretty-format",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints global window as constructor name alone",
@@ -12950,7 +12976,7 @@ TestRunResult {
},
TestGroupResult {
"name": "DOMElement Plugin",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports a single HTML element",
@@ -13105,7 +13131,7 @@ TestRunResult {
},
TestGroupResult {
"name": "DOMElement Plugin matches constructor name of SVG elements",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "jsdom 9 and 10",
@@ -13125,10 +13151,10 @@ TestRunResult {
"totalTime": 148,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "formatTestResults",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "includes test full name",
@@ -13142,10 +13168,10 @@ TestRunResult {
"totalTime": 53,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exposes the right API using default working",
@@ -13201,13 +13227,13 @@ TestRunResult {
"totalTime": 230,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "beforeEach hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "beforeEach throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "beforeEach throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 2,
},
@@ -13257,10 +13283,10 @@ TestRunResult {
},
TestGroupResult {
"name": "beforeAll hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "beforeAll throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "beforeAll throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 0,
},
@@ -13310,10 +13336,10 @@ TestRunResult {
},
TestGroupResult {
"name": "afterEach hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "afterEach throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "afterEach throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 0,
},
@@ -13363,10 +13389,10 @@ TestRunResult {
},
TestGroupResult {
"name": "afterAll hooks error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "afterAll throws an error when \\"String\\" is provided as a first argument to it",
+ "name": "afterAll throws an error when "String" is provided as a first argument to it",
"result": "success",
"time": 1,
},
@@ -13419,10 +13445,10 @@ TestRunResult {
"totalTime": 51,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Jasmine2Reporter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "reports nested suites",
@@ -13436,10 +13462,10 @@ TestRunResult {
"totalTime": 107,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "test/it.todo error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "it throws error when given no arguments",
@@ -13465,10 +13491,10 @@ TestRunResult {
"totalTime": 27,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "test/it error throwing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "it throws error with missing callback function",
@@ -13512,10 +13538,10 @@ TestRunResult {
"totalTime": 32,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "iterators",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works for arrays",
@@ -13547,10 +13573,10 @@ TestRunResult {
"totalTime": 43,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "pTimeout",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls \`clearTimeout\` and resolves when \`promise\` resolves.",
@@ -13576,10 +13602,10 @@ TestRunResult {
"totalTime": 44,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "creation of a cache key",
@@ -13593,10 +13619,10 @@ TestRunResult {
"totalTime": 75,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "concurrent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add 1 to number",
@@ -13622,10 +13648,10 @@ TestRunResult {
"totalTime": 24,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "global.test",
@@ -13639,10 +13665,10 @@ TestRunResult {
"totalTime": 23,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ReactElement Plugin",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "serializes forwardRef without displayName",
@@ -13668,10 +13694,10 @@ TestRunResult {
"totalTime": 64,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the shared tasks in FIFO ordering",
@@ -13697,10 +13723,10 @@ TestRunResult {
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the tasks in order",
@@ -13738,10 +13764,10 @@ TestRunResult {
"totalTime": 63,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "SearchSource isTestFilePath",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports ../ paths and unix separators via testRegex",
@@ -13764,7 +13790,7 @@ TestRunResult {
},
TestGroupResult {
"name": "SearchSource testPathsMatching",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "finds tests matching a pattern via testRegex",
@@ -13847,7 +13873,7 @@ TestRunResult {
},
TestGroupResult {
"name": "SearchSource findRelatedTests",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "makes sure a file is related to itself",
@@ -13870,7 +13896,7 @@ TestRunResult {
},
TestGroupResult {
"name": "SearchSource findRelatedTestsFromPattern",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns empty search result for empty input",
@@ -13911,7 +13937,7 @@ TestRunResult {
},
TestGroupResult {
"name": "SearchSource findRelatedSourcesFromTestsInChangedFiles",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "return empty set if no SCM",
@@ -13931,10 +13957,10 @@ TestRunResult {
"totalTime": 2596,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw if passed two arguments",
@@ -13945,7 +13971,7 @@ TestRunResult {
},
TestGroupResult {
"name": ".rejects",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -13966,13 +13992,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value \\"a\\" synchronously",
+ "name": "fails non-promise value "a" synchronously",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value \\"a\\"",
+ "name": "fails non-promise value "a"",
"result": "success",
"time": 2,
},
@@ -14002,13 +14028,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value {\\"a\\": 1} synchronously",
+ "name": "fails non-promise value {"a": 1} synchronously",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value {\\"a\\": 1}",
+ "name": "fails non-promise value {"a": 1}",
"result": "success",
"time": 0,
},
@@ -14070,7 +14096,7 @@ TestRunResult {
},
TestGroupResult {
"name": ".resolves",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should resolve",
@@ -14079,13 +14105,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value \\"a\\" synchronously",
+ "name": "fails non-promise value "a" synchronously",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value \\"a\\"",
+ "name": "fails non-promise value "a"",
"result": "success",
"time": 0,
},
@@ -14115,13 +14141,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value {\\"a\\": 1} synchronously",
+ "name": "fails non-promise value {"a": 1} synchronously",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "fails non-promise value {\\"a\\": 1}",
+ "name": "fails non-promise value {"a": 1}",
"result": "success",
"time": 0,
},
@@ -14183,7 +14209,7 @@ TestRunResult {
},
TestGroupResult {
"name": ".toBe()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not throw",
@@ -14216,25 +14242,25 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails for: {\\"a\\": 1} and {\\"a\\": 1}",
+ "name": "fails for: {"a": 1} and {"a": 1}",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: {\\"a\\": 1} and {\\"a\\": 5}",
+ "name": "fails for: {"a": 1} and {"a": 5}",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: {\\"a\\": [Function a], \\"b\\": 2} and {\\"a\\": Any, \\"b\\": 2}",
+ "name": "fails for: {"a": [Function a], "b": 2} and {"a": Any, "b": 2}",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: {\\"a\\": undefined, \\"b\\": 2} and {\\"b\\": 2}",
+ "name": "fails for: {"a": undefined, "b": 2} and {"b": 2}",
"result": "success",
"time": 8,
},
@@ -14270,37 +14296,37 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "fails for: \\"abc\\" and \\"cde\\"",
+ "name": "fails for: "abc" and "cde"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: \\"painless JavaScript testing\\" and \\"delightful JavaScript testing\\"",
+ "name": "fails for: "painless JavaScript testing" and "delightful JavaScript testing"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: \\"\\" and \\"compare one-line string to empty string\\"",
+ "name": "fails for: "" and "compare one-line string to empty string"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: \\"with
-trailing space\\" and \\"without trailing space\\"",
+ "name": "fails for: "with
+trailing space" and "without trailing space"",
"result": "success",
"time": 8,
},
TestCaseResult {
"error": undefined,
- "name": "fails for: \\"four
+ "name": "fails for: "four
4
line
-string\\" and \\"3
+string" and "3
line
-string\\"",
+string"",
"result": "success",
"time": 0,
},
@@ -14330,7 +14356,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "fails for: {\\"a\\": 1n} and {\\"a\\": 1n}",
+ "name": "fails for: {"a": 1n} and {"a": 1n}",
"result": "success",
"time": 0,
},
@@ -14348,7 +14374,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "fails for '\\"a\\"' with '.not'",
+ "name": "fails for '"a"' with '.not'",
"result": "success",
"time": 0,
},
@@ -14404,7 +14430,7 @@ string\\"",
},
TestGroupResult {
"name": ".toStrictEqual()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not ignore keys with undefined values",
@@ -14481,7 +14507,7 @@ string\\"",
},
TestGroupResult {
"name": ".toEqual()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: false} expect(true).toEqual(false)",
@@ -14532,13 +14558,13 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abc\\").toEqual({\\"0\\": \\"a\\", \\"1\\": \\"b\\", \\"2\\": \\"c\\"})",
+ "name": "{pass: false} expect("abc").toEqual({"0": "a", "1": "b", "2": "c"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"0\\": \\"a\\", \\"1\\": \\"b\\", \\"2\\": \\"c\\"}).toEqual(\\"abc\\")",
+ "name": "{pass: false} expect({"0": "a", "1": "b", "2": "c"}).toEqual("abc")",
"result": "success",
"time": 1,
},
@@ -14550,75 +14576,75 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 1}).toEqual({\\"a\\": 2})",
+ "name": "{pass: false} expect({"a": 1}).toEqual({"a": 2})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 5}).toEqual({\\"b\\": 6})",
+ "name": "{pass: false} expect({"a": 5}).toEqual({"b": 6})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"foo\\": {\\"bar\\": 1}}).toEqual({\\"foo\\": {}})",
+ "name": "{pass: false} expect({"foo": {"bar": 1}}).toEqual({"foo": {}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"getterAndSetter\\": {}}).toEqual({\\"getterAndSetter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"getterAndSetter": {}}).toEqual({"getterAndSetter": {"foo": "bar"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"frozenGetterAndSetter\\": {}}).toEqual({\\"frozenGetterAndSetter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"frozenGetterAndSetter": {}}).toEqual({"frozenGetterAndSetter": {"foo": "bar"}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"getter\\": {}}).toEqual({\\"getter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"getter": {}}).toEqual({"getter": {"foo": "bar"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"frozenGetter\\": {}}).toEqual({\\"frozenGetter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"frozenGetter": {}}).toEqual({"frozenGetter": {"foo": "bar"}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"setter\\": undefined}).toEqual({\\"setter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"setter": undefined}).toEqual({"setter": {"foo": "bar"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"frozenSetter\\": undefined}).toEqual({\\"frozenSetter\\": {\\"foo\\": \\"bar\\"}})",
+ "name": "{pass: false} expect({"frozenSetter": undefined}).toEqual({"frozenSetter": {"foo": "bar"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"banana\\").toEqual(\\"apple\\")",
+ "name": "{pass: false} expect("banana").toEqual("apple")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"1 234,57 $\\").toEqual(\\"1 234,57 $\\")",
+ "name": "{pass: false} expect("1 234,57 $").toEqual("1 234,57 $")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"type TypeName = T extends Function ? \\\\\\"function\\\\\\" : \\\\\\"object\\\\\\";\\").toEqual(\\"type TypeName = T extends Function
-? \\\\\\"function\\\\\\"
-: \\\\\\"object\\\\\\";\\")",
+ "name": "{pass: false} expect("type TypeName = T extends Function ? \\"function\\" : \\"object\\";").toEqual("type TypeName = T extends Function
+? \\"function\\"
+: \\"object\\";")",
"result": "success",
"time": 1,
},
@@ -14708,55 +14734,55 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Map {1 => \\"one\\", 2 => \\"two\\"}).toEqual(Map {1 => \\"one\\"})",
+ "name": "{pass: false} expect(Map {1 => "one", 2 => "two"}).toEqual(Map {1 => "one"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Map {\\"a\\" => 0}).toEqual(Map {\\"b\\" => 0})",
+ "name": "{pass: false} expect(Map {"a" => 0}).toEqual(Map {"b" => 0})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Map {\\"v\\" => 1}).toEqual(Map {\\"v\\" => 2})",
+ "name": "{pass: false} expect(Map {"v" => 1}).toEqual(Map {"v" => 2})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Map {[\\"v\\"] => 1}).toEqual(Map {[\\"v\\"] => 2})",
+ "name": "{pass: false} expect(Map {["v"] => 1}).toEqual(Map {["v"] => 2})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Map {[1] => Map {[1] => \\"one\\"}}).toEqual(Map {[1] => Map {[1] => \\"two\\"}})",
+ "name": "{pass: false} expect(Map {[1] => Map {[1] => "one"}}).toEqual(Map {[1] => Map {[1] => "two"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Immutable.Map {\\"a\\": 0}).toEqual(Immutable.Map {\\"b\\": 0})",
+ "name": "{pass: false} expect(Immutable.Map {"a": 0}).toEqual(Immutable.Map {"b": 0})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Immutable.Map {\\"v\\": 1}).toEqual(Immutable.Map {\\"v\\": 2})",
+ "name": "{pass: false} expect(Immutable.Map {"v": 1}).toEqual(Immutable.Map {"v": 2})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Immutable.OrderedMap {1: \\"one\\", 2: \\"two\\"}).toEqual(Immutable.OrderedMap {2: \\"two\\", 1: \\"one\\"})",
+ "name": "{pass: false} expect(Immutable.OrderedMap {1: "one", 2: "two"}).toEqual(Immutable.OrderedMap {2: "two", 1: "one"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(Immutable.Map {\\"1\\": Immutable.Map {\\"2\\": {\\"a\\": 99}}}).toEqual(Immutable.Map {\\"1\\": Immutable.Map {\\"2\\": {\\"a\\": 11}}})",
+ "name": "{pass: false} expect(Immutable.Map {"1": Immutable.Map {"2": {"a": 99}}}).toEqual(Immutable.Map {"1": Immutable.Map {"2": {"a": 11}}})",
"result": "success",
"time": 1,
},
@@ -14768,13 +14794,13 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 1, \\"b\\": 2}).toEqual(ObjectContaining {\\"a\\": 2})",
+ "name": "{pass: false} expect({"a": 1, "b": 2}).toEqual(ObjectContaining {"a": 2})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(false).toEqual(ObjectContaining {\\"a\\": 2})",
+ "name": "{pass: false} expect(false).toEqual(ObjectContaining {"a": 2})",
"result": "success",
"time": 0,
},
@@ -14792,13 +14818,13 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abd\\").toEqual(StringContaining \\"bc\\")",
+ "name": "{pass: false} expect("abd").toEqual(StringContaining "bc")",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abd\\").toEqual(StringMatching /bc/i)",
+ "name": "{pass: false} expect("abd").toEqual(StringMatching /bc/i)",
"result": "success",
"time": 0,
},
@@ -14816,19 +14842,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"Eve\\").toEqual({\\"asymmetricMatch\\": [Function asymmetricMatch]})",
+ "name": "{pass: false} expect("Eve").toEqual({"asymmetricMatch": [Function asymmetricMatch]})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"target\\": {\\"nodeType\\": 1, \\"value\\": \\"a\\"}}).toEqual({\\"target\\": {\\"nodeType\\": 1, \\"value\\": \\"b\\"}})",
+ "name": "{pass: false} expect({"target": {"nodeType": 1, "value": "a"}}).toEqual({"target": {"nodeType": 1, "value": "b"}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"nodeName\\": \\"div\\", \\"nodeType\\": 1}).toEqual({\\"nodeName\\": \\"p\\", \\"nodeType\\": 1})",
+ "name": "{pass: false} expect({"nodeName": "div", "nodeType": 1}).toEqual({"nodeName": "p", "nodeType": 1})",
"result": "success",
"time": 1,
},
@@ -14888,19 +14914,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abc\\").not.toEqual(\\"abc\\")",
+ "name": "{pass: true} expect("abc").not.toEqual("abc")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abc\\").not.toEqual(\\"abc\\")",
+ "name": "{pass: true} expect("abc").not.toEqual("abc")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abc\\").not.toEqual(\\"abc\\")",
+ "name": "{pass: true} expect("abc").not.toEqual("abc")",
"result": "success",
"time": 1,
},
@@ -14936,7 +14962,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 99}).not.toEqual({\\"a\\": 99})",
+ "name": "{pass: true} expect({"a": 99}).not.toEqual({"a": 99})",
"result": "success",
"time": 1,
},
@@ -14978,7 +15004,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Set {{\\"a\\": 1}, {\\"b\\": 2}}).not.toEqual(Set {{\\"b\\": 2}, {\\"a\\": 1}})",
+ "name": "{pass: true} expect(Set {{"a": 1}, {"b": 2}}).not.toEqual(Set {{"b": 2}, {"a": 1}})",
"result": "success",
"time": 0,
},
@@ -15020,43 +15046,43 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {1 => \\"one\\", 2 => \\"two\\"}).not.toEqual(Map {1 => \\"one\\", 2 => \\"two\\"})",
+ "name": "{pass: true} expect(Map {1 => "one", 2 => "two"}).not.toEqual(Map {1 => "one", 2 => "two"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {1 => \\"one\\", 2 => \\"two\\"}).not.toEqual(Map {2 => \\"two\\", 1 => \\"one\\"})",
+ "name": "{pass: true} expect(Map {1 => "one", 2 => "two"}).not.toEqual(Map {2 => "two", 1 => "one"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {[1] => \\"one\\", [2] => \\"two\\", [3] => \\"three\\", [3] => \\"four\\"}).not.toEqual(Map {[3] => \\"three\\", [3] => \\"four\\", [2] => \\"two\\", [1] => \\"one\\"})",
+ "name": "{pass: true} expect(Map {[1] => "one", [2] => "two", [3] => "three", [3] => "four"}).not.toEqual(Map {[3] => "three", [3] => "four", [2] => "two", [1] => "one"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {[1] => Map {[1] => \\"one\\"}, [2] => Map {[2] => \\"two\\"}}).not.toEqual(Map {[2] => Map {[2] => \\"two\\"}, [1] => Map {[1] => \\"one\\"}})",
+ "name": "{pass: true} expect(Map {[1] => Map {[1] => "one"}, [2] => Map {[2] => "two"}}).not.toEqual(Map {[2] => Map {[2] => "two"}, [1] => Map {[1] => "one"}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {[1] => \\"one\\", [2] => \\"two\\"}).not.toEqual(Map {[2] => \\"two\\", [1] => \\"one\\"})",
+ "name": "{pass: true} expect(Map {[1] => "one", [2] => "two"}).not.toEqual(Map {[2] => "two", [1] => "one"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {{\\"a\\": 1} => \\"one\\", {\\"b\\": 2} => \\"two\\"}).not.toEqual(Map {{\\"b\\": 2} => \\"two\\", {\\"a\\": 1} => \\"one\\"})",
+ "name": "{pass: true} expect(Map {{"a": 1} => "one", {"b": 2} => "two"}).not.toEqual(Map {{"b": 2} => "two", {"a": 1} => "one"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Map {1 => [\\"one\\"], 2 => [\\"two\\"]}).not.toEqual(Map {2 => [\\"two\\"], 1 => [\\"one\\"]})",
+ "name": "{pass: true} expect(Map {1 => ["one"], 2 => ["two"]}).not.toEqual(Map {2 => ["two"], 1 => ["one"]})",
"result": "success",
"time": 1,
},
@@ -15068,25 +15094,25 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Immutable.Map {1: \\"one\\", 2: \\"two\\"}).not.toEqual(Immutable.Map {1: \\"one\\", 2: \\"two\\"})",
+ "name": "{pass: true} expect(Immutable.Map {1: "one", 2: "two"}).not.toEqual(Immutable.Map {1: "one", 2: "two"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Immutable.Map {1: \\"one\\", 2: \\"two\\"}).not.toEqual(Immutable.Map {2: \\"two\\", 1: \\"one\\"})",
+ "name": "{pass: true} expect(Immutable.Map {1: "one", 2: "two"}).not.toEqual(Immutable.Map {2: "two", 1: "one"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Immutable.OrderedMap {1: \\"one\\", 2: \\"two\\"}).not.toEqual(Immutable.OrderedMap {1: \\"one\\", 2: \\"two\\"})",
+ "name": "{pass: true} expect(Immutable.OrderedMap {1: "one", 2: "two"}).not.toEqual(Immutable.OrderedMap {1: "one", 2: "two"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(Immutable.Map {\\"1\\": Immutable.Map {\\"2\\": {\\"a\\": 99}}}).not.toEqual(Immutable.Map {\\"1\\": Immutable.Map {\\"2\\": {\\"a\\": 99}}})",
+ "name": "{pass: true} expect(Immutable.Map {"1": Immutable.Map {"2": {"a": 99}}}).not.toEqual(Immutable.Map {"1": Immutable.Map {"2": {"a": 99}}})",
"result": "success",
"time": 1,
},
@@ -15098,7 +15124,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 1, \\"b\\": 2}).not.toEqual(ObjectContaining {\\"a\\": 1})",
+ "name": "{pass: true} expect({"a": 1, "b": 2}).not.toEqual(ObjectContaining {"a": 1})",
"result": "success",
"time": 0,
},
@@ -15110,13 +15136,13 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abcd\\").not.toEqual(StringContaining \\"bc\\")",
+ "name": "{pass: true} expect("abcd").not.toEqual(StringContaining "bc")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abcd\\").not.toEqual(StringMatching /bc/)",
+ "name": "{pass: true} expect("abcd").not.toEqual(StringMatching /bc/)",
"result": "success",
"time": 1,
},
@@ -15134,19 +15160,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 1, \\"b\\": [Function b], \\"c\\": true}).not.toEqual({\\"a\\": 1, \\"b\\": Any, \\"c\\": Anything})",
+ "name": "{pass: true} expect({"a": 1, "b": [Function b], "c": true}).not.toEqual({"a": 1, "b": Any, "c": Anything})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"Alice\\").not.toEqual({\\"asymmetricMatch\\": [Function asymmetricMatch]})",
+ "name": "{pass: true} expect("Alice").not.toEqual({"asymmetricMatch": [Function asymmetricMatch]})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"nodeName\\": \\"div\\", \\"nodeType\\": 1}).not.toEqual({\\"nodeName\\": \\"div\\", \\"nodeType\\": 1})",
+ "name": "{pass: true} expect({"nodeName": "div", "nodeType": 1}).not.toEqual({"nodeName": "div", "nodeType": 1})",
"result": "success",
"time": 0,
},
@@ -15188,7 +15214,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 99n}).not.toEqual({\\"a\\": 99n})",
+ "name": "{pass: true} expect({"a": 99n}).not.toEqual({"a": 99n})",
"result": "success",
"time": 0,
},
@@ -15226,7 +15252,7 @@ string\\"",
},
TestGroupResult {
"name": ".toEqual() cyclic object equality",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "properties with the same circularity are equal",
@@ -15249,7 +15275,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeInstanceOf()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "passing Map {} and [Function Map]",
@@ -15300,7 +15326,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "failing \\"a\\" and [Function String]",
+ "name": "failing "a" and [Function String]",
"result": "success",
"time": 0,
},
@@ -15342,7 +15368,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "failing /\\\\w+/ and [Function anonymous]",
+ "name": "failing /\\w+/ and [Function anonymous]",
"result": "success",
"time": 1,
},
@@ -15362,7 +15388,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeTruthy(), .toBeFalsy()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not accept arguments",
@@ -15395,7 +15421,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'\\"a\\"' is truthy",
+ "name": "'"a"' is truthy",
"result": "success",
"time": 1,
},
@@ -15455,7 +15481,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'\\"\\"' is falsy",
+ "name": "'""' is falsy",
"result": "success",
"time": 1,
},
@@ -15475,7 +15501,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeNaN()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: true} expect(NaN).toBeNaN()",
@@ -15492,7 +15518,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeNull()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails for '{}'",
@@ -15519,7 +15545,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "fails for '\\"a\\"'",
+ "name": "fails for '"a"'",
"result": "success",
"time": 0,
},
@@ -15563,7 +15589,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeDefined(), .toBeUndefined()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "'{}' is defined",
@@ -15590,7 +15616,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'\\"a\\"' is defined",
+ "name": "'"a"' is defined",
"result": "success",
"time": 0,
},
@@ -15634,7 +15660,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeGreaterThan(), .toBeLessThan(), .toBeGreaterThanOrEqual(), .toBeLessThanOrEqual()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: true} expect(1).toBeLessThan(2)",
@@ -16227,7 +16253,7 @@ string\\"",
},
TestGroupResult {
"name": ".toContain(), .toContainEqual()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "iterable",
@@ -16242,7 +16268,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'[\\"a\\", \\"b\\", \\"c\\", \\"d\\"]' contains '\\"a\\"'",
+ "name": "'["a", "b", "c", "d"]' contains '"a"'",
"result": "success",
"time": 1,
},
@@ -16266,19 +16292,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'\\"abcdef\\"' contains '\\"abc\\"'",
+ "name": "'"abcdef"' contains '"abc"'",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "'\\"11112111\\"' contains '\\"2\\"'",
+ "name": "'"11112111"' contains '"2"'",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "'Set {\\"abc\\", \\"def\\"}' contains '\\"abc\\"'",
+ "name": "'Set {"abc", "def"}' contains '"abc"'",
"result": "success",
"time": 0,
},
@@ -16344,7 +16370,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'[\\"a\\", \\"b\\", \\"c\\", \\"d\\"]' contains a value equal to '\\"a\\"'",
+ "name": "'["a", "b", "c", "d"]' contains a value equal to '"a"'",
"result": "success",
"time": 0,
},
@@ -16368,7 +16394,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'[{\\"a\\": \\"b\\"}, {\\"a\\": \\"c\\"}]' contains a value equal to '{\\"a\\": \\"b\\"}'",
+ "name": "'[{"a": "b"}, {"a": "c"}]' contains a value equal to '{"a": "b"}'",
"result": "success",
"time": 1,
},
@@ -16386,7 +16412,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "'[{\\"a\\": \\"b\\"}, {\\"a\\": \\"c\\"}]' does not contain a value equal to'{\\"a\\": \\"d\\"}'",
+ "name": "'[{"a": "b"}, {"a": "c"}]' does not contain a value equal to'{"a": "d"}'",
"result": "success",
"time": 2,
},
@@ -16400,7 +16426,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeCloseTo",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: true} expect(0).toBeCloseTo(0)",
@@ -16525,7 +16551,7 @@ string\\"",
},
TestGroupResult {
"name": ".toBeCloseTo throws: Matcher error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "promise empty isNot false received",
@@ -16566,7 +16592,7 @@ string\\"",
},
TestGroupResult {
"name": ".toMatch()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: true} expect(foo).toMatch(foo)",
@@ -16593,79 +16619,79 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [1, \\"foo\\"]",
+ "name": "throws if non String actual value passed: [1, "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [{}, \\"foo\\"]",
+ "name": "throws if non String actual value passed: [{}, "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [[], \\"foo\\"]",
+ "name": "throws if non String actual value passed: [[], "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [true, \\"foo\\"]",
+ "name": "throws if non String actual value passed: [true, "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [/foo/i, \\"foo\\"]",
+ "name": "throws if non String actual value passed: [/foo/i, "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [[Function anonymous], \\"foo\\"]",
+ "name": "throws if non String actual value passed: [[Function anonymous], "foo"]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String actual value passed: [undefined, \\"foo\\"]",
+ "name": "throws if non String actual value passed: [undefined, "foo"]",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", 1]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", 1]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", {}]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", {}]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", []]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", []]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", true]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", true]",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", [Function anonymous]]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", [Function anonymous]]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "throws if non String/RegExp expected value passed: [\\"foo\\", undefined]",
+ "name": "throws if non String/RegExp expected value passed: ["foo", undefined]",
"result": "success",
"time": 0,
},
@@ -16685,7 +16711,7 @@ string\\"",
},
TestGroupResult {
"name": ".toHaveLength",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "{pass: true} expect([1, 2]).toHaveLength(2)",
@@ -16700,19 +16726,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect([\\"a\\", \\"b\\"]).toHaveLength(2)",
+ "name": "{pass: true} expect(["a", "b"]).toHaveLength(2)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"abc\\").toHaveLength(3)",
+ "name": "{pass: true} expect("abc").toHaveLength(3)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"\\").toHaveLength(0)",
+ "name": "{pass: true} expect("").toHaveLength(0)",
"result": "success",
"time": 0,
},
@@ -16736,19 +16762,19 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect([\\"a\\", \\"b\\"]).toHaveLength(99)",
+ "name": "{pass: false} expect(["a", "b"]).toHaveLength(99)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abc\\").toHaveLength(66)",
+ "name": "{pass: false} expect("abc").toHaveLength(66)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"\\").toHaveLength(1)",
+ "name": "{pass: false} expect("").toHaveLength(1)",
"result": "success",
"time": 1,
},
@@ -16762,7 +16788,7 @@ string\\"",
},
TestGroupResult {
"name": ".toHaveLength matcher error expected length",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "not number",
@@ -16797,64 +16823,64 @@ string\\"",
},
TestGroupResult {
"name": ".toHaveProperty()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a.b.c.d', 1)",
+ "name": "{pass: true} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a.b.c.d', 1)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a,b,c,d', 1)",
+ "name": "{pass: true} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a,b,c,d', 1)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a.b.c.d\\": 1}).toHaveProperty('a.b.c.d', 1)",
+ "name": "{pass: true} expect({"a.b.c.d": 1}).toHaveProperty('a.b.c.d', 1)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": [1, 2, 3]}}).toHaveProperty('a,b,1', 2)",
+ "name": "{pass: true} expect({"a": {"b": [1, 2, 3]}}).toHaveProperty('a,b,1', 2)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": [1, 2, 3]}}).toHaveProperty('a,b,1', Any)",
+ "name": "{pass: true} expect({"a": {"b": [1, 2, 3]}}).toHaveProperty('a,b,1', Any)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 0}).toHaveProperty('a', 0)",
+ "name": "{pass: true} expect({"a": 0}).toHaveProperty('a', 0)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": undefined}}).toHaveProperty('a.b', undefined)",
+ "name": "{pass: true} expect({"a": {"b": undefined}}).toHaveProperty('a.b', undefined)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {}}).toHaveProperty('a.b', undefined)",
+ "name": "{pass: true} expect({"a": {}}).toHaveProperty('a.b', undefined)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": {\\"c\\": 5}}}).toHaveProperty('a.b', {\\"c\\": 5})",
+ "name": "{pass: true} expect({"a": {"b": {"c": 5}}}).toHaveProperty('a.b', {"c": 5})",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"property\\": 1}).toHaveProperty('property', 1)",
+ "name": "{pass: true} expect({"property": 1}).toHaveProperty('property', 1)",
"result": "success",
"time": 0,
},
@@ -16866,7 +16892,7 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({}).toHaveProperty('b', \\"b\\")",
+ "name": "{pass: true} expect({}).toHaveProperty('b', "b")",
"result": "success",
"time": 0,
},
@@ -16878,31 +16904,31 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"val\\": true}).toHaveProperty('a', undefined)",
+ "name": "{pass: true} expect({"val": true}).toHaveProperty('a', undefined)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"val\\": true}).toHaveProperty('c', \\"c\\")",
+ "name": "{pass: true} expect({"val": true}).toHaveProperty('c', "c")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"val\\": true}).toHaveProperty('val', true)",
+ "name": "{pass: true} expect({"val": true}).toHaveProperty('val', true)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"nodeName\\": \\"DIV\\"}).toHaveProperty('nodeType', 1)",
+ "name": "{pass: true} expect({"nodeName": "DIV"}).toHaveProperty('nodeType', 1)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect(\\"\\").toHaveProperty('length', 0)",
+ "name": "{pass: true} expect("").toHaveProperty('length', 0)",
"result": "success",
"time": 0,
},
@@ -16914,95 +16940,95 @@ string\\"",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a.b.ttt.d', 1)",
+ "name": "{pass: false} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a.b.ttt.d', 1)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a.b.c.d', 2)",
+ "name": "{pass: false} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a.b.c.d', 2)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a.b.c.d\\": 1}).toHaveProperty('a.b.c.d', 2)",
+ "name": "{pass: false} expect({"a.b.c.d": 1}).toHaveProperty('a.b.c.d', 2)",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a.b.c.d\\": 1}).toHaveProperty('a.b.c.d', 2)",
+ "name": "{pass: false} expect({"a.b.c.d": 1}).toHaveProperty('a.b.c.d', 2)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"children\\": [\\"\\\\\\"That cartoon\\\\\\"\\"], \\"props\\": null, \\"type\\": \\"p\\"}).toHaveProperty('children,0', \\"\\\\\\"That cat cartoon\\\\\\"\\")",
+ "name": "{pass: false} expect({"children": ["\\"That cartoon\\""], "props": null, "type": "p"}).toHaveProperty('children,0', "\\"That cat cartoon\\"")",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"children\\": [\\"Roses are red.
+ "name": "{pass: false} expect({"children": ["Roses are red.
Violets are blue.
-Testing with Jest is good for you.\\"], \\"props\\": null, \\"type\\": \\"pre\\"}).toHaveProperty('children,0', \\"Roses are red, violets are blue.
+Testing with Jest is good for you."], "props": null, "type": "pre"}).toHaveProperty('children,0', "Roses are red, violets are blue.
Testing with Jest
-Is good for you.\\")",
+Is good for you.")",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a,b,c,d', 2)",
+ "name": "{pass: false} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a,b,c,d', 2)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": {}}}}).toHaveProperty('a.b.c.d', 1)",
+ "name": "{pass: false} expect({"a": {"b": {"c": {}}}}).toHaveProperty('a.b.c.d', 1)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 1}).toHaveProperty('a.b.c.d', 5)",
+ "name": "{pass: false} expect({"a": 1}).toHaveProperty('a.b.c.d', 5)",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({}).toHaveProperty('a', \\"test\\")",
+ "name": "{pass: false} expect({}).toHaveProperty('a', "test")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": 3}}).toHaveProperty('a.b', undefined)",
+ "name": "{pass: false} expect({"a": {"b": 3}}).toHaveProperty('a.b', undefined)",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(1).toHaveProperty('a.b.c', \\"test\\")",
+ "name": "{pass: false} expect(1).toHaveProperty('a.b.c', "test")",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abc\\").toHaveProperty('a.b.c', {\\"a\\": 5})",
+ "name": "{pass: false} expect("abc").toHaveProperty('a.b.c', {"a": 5})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": 5}}}).toHaveProperty('a.b', {\\"c\\": 4})",
+ "name": "{pass: false} expect({"a": {"b": {"c": 5}}}).toHaveProperty('a.b', {"c": 4})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({}).toHaveProperty('a', \\"a\\")",
+ "name": "{pass: false} expect({}).toHaveProperty('a', "a")",
"result": "success",
"time": 1,
},
@@ -17014,49 +17040,49 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a.b.c.d')",
+ "name": "{pass: true} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a.b.c.d')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": {\\"c\\": {\\"d\\": 1}}}}).toHaveProperty('a,b,c,d')",
+ "name": "{pass: true} expect({"a": {"b": {"c": {"d": 1}}}}).toHaveProperty('a,b,c,d')",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a.b.c.d\\": 1}).toHaveProperty('a.b.c.d')",
+ "name": "{pass: true} expect({"a.b.c.d": 1}).toHaveProperty('a.b.c.d')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": [1, 2, 3]}}).toHaveProperty('a,b,1')",
+ "name": "{pass: true} expect({"a": {"b": [1, 2, 3]}}).toHaveProperty('a,b,1')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 0}).toHaveProperty('a')",
+ "name": "{pass: true} expect({"a": 0}).toHaveProperty('a')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"b\\": undefined}}).toHaveProperty('a.b')",
+ "name": "{pass: true} expect({"a": {"b": undefined}}).toHaveProperty('a.b')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": {\\"b\\": {\\"c\\": {}}}}).toHaveProperty('a.b.c.d')",
+ "name": "{pass: false} expect({"a": {"b": {"c": {}}}}).toHaveProperty('a.b.c.d')",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 1}).toHaveProperty('a.b.c.d')",
+ "name": "{pass: false} expect({"a": 1}).toHaveProperty('a.b.c.d')",
"result": "success",
"time": 0,
},
@@ -17074,7 +17100,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"abc\\").toHaveProperty('a.b.c')",
+ "name": "{pass: false} expect("abc").toHaveProperty('a.b.c')",
"result": "success",
"time": 0,
},
@@ -17092,7 +17118,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect(\\"\\").toHaveProperty('key')",
+ "name": "{pass: false} expect("").toHaveProperty('key')",
"result": "success",
"time": 0,
},
@@ -17104,7 +17130,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"key\\": 1}).toHaveProperty('not')",
+ "name": "{pass: false} expect({"key": 1}).toHaveProperty('not')",
"result": "success",
"time": 0,
},
@@ -17122,19 +17148,19 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{error} expect({\\"a\\": {\\"b\\": {}}}).toHaveProperty('undefined')",
+ "name": "{error} expect({"a": {"b": {}}}).toHaveProperty('undefined')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{error} expect({\\"a\\": {\\"b\\": {}}}).toHaveProperty('null')",
+ "name": "{error} expect({"a": {"b": {}}}).toHaveProperty('null')",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{error} expect({\\"a\\": {\\"b\\": {}}}).toHaveProperty('1')",
+ "name": "{error} expect({"a": {"b": {}}}).toHaveProperty('1')",
"result": "success",
"time": 0,
},
@@ -17148,34 +17174,34 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toMatchObject() circular references simple circular references",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"hello\\", \\"ref\\": [Circular]}).toMatchObject({})",
+ "name": "{pass: true} expect({"a": "hello", "ref": [Circular]}).toMatchObject({})",
"result": "success",
"time": 3,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"hello\\", \\"ref\\": [Circular]}).toMatchObject({\\"a\\": \\"hello\\", \\"ref\\": [Circular]})",
+ "name": "{pass: true} expect({"a": "hello", "ref": [Circular]}).toMatchObject({"a": "hello", "ref": [Circular]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({}).toMatchObject({\\"a\\": \\"hello\\", \\"ref\\": [Circular]})",
+ "name": "{pass: false} expect({}).toMatchObject({"a": "hello", "ref": [Circular]})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"hello\\", \\"ref\\": [Circular]}).toMatchObject({\\"a\\": \\"world\\", \\"ref\\": [Circular]})",
+ "name": "{pass: false} expect({"a": "hello", "ref": [Circular]}).toMatchObject({"a": "world", "ref": [Circular]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"ref\\": \\"not a ref\\"}).toMatchObject({\\"a\\": \\"hello\\", \\"ref\\": [Circular]})",
+ "name": "{pass: false} expect({"ref": "not a ref"}).toMatchObject({"a": "hello", "ref": [Circular]})",
"result": "success",
"time": 1,
},
@@ -17183,34 +17209,34 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toMatchObject() circular references transitive circular references",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}}).toMatchObject({})",
+ "name": "{pass: true} expect({"a": "hello", "nestedObj": {"parentObj": [Circular]}}).toMatchObject({})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}}).toMatchObject({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}})",
+ "name": "{pass: true} expect({"a": "hello", "nestedObj": {"parentObj": [Circular]}}).toMatchObject({"a": "hello", "nestedObj": {"parentObj": [Circular]}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({}).toMatchObject({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}})",
+ "name": "{pass: false} expect({}).toMatchObject({"a": "hello", "nestedObj": {"parentObj": [Circular]}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"world\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}}).toMatchObject({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}})",
+ "name": "{pass: false} expect({"a": "world", "nestedObj": {"parentObj": [Circular]}}).toMatchObject({"a": "hello", "nestedObj": {"parentObj": [Circular]}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"nestedObj\\": {\\"parentObj\\": \\"not the parent ref\\"}}).toMatchObject({\\"a\\": \\"hello\\", \\"nestedObj\\": {\\"parentObj\\": [Circular]}})",
+ "name": "{pass: false} expect({"nestedObj": {"parentObj": "not the parent ref"}}).toMatchObject({"a": "hello", "nestedObj": {"parentObj": [Circular]}})",
"result": "success",
"time": 1,
},
@@ -17218,52 +17244,52 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toMatchObject()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\"}).toMatchObject({\\"a\\": \\"b\\"})",
+ "name": "{pass: true} expect({"a": "b", "c": "d"}).toMatchObject({"a": "b"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\"}).toMatchObject({\\"a\\": \\"b\\", \\"c\\": \\"d\\"})",
+ "name": "{pass: true} expect({"a": "b", "c": "d"}).toMatchObject({"a": "b", "c": "d"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"t\\": {\\"x\\": {\\"r\\": \\"r\\"}, \\"z\\": \\"z\\"}}).toMatchObject({\\"a\\": \\"b\\", \\"t\\": {\\"z\\": \\"z\\"}})",
+ "name": "{pass: true} expect({"a": "b", "t": {"x": {"r": "r"}, "z": "z"}}).toMatchObject({"a": "b", "t": {"z": "z"}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"t\\": {\\"x\\": {\\"r\\": \\"r\\"}, \\"z\\": \\"z\\"}}).toMatchObject({\\"t\\": {\\"x\\": {\\"r\\": \\"r\\"}}})",
+ "name": "{pass: true} expect({"a": "b", "t": {"x": {"r": "r"}, "z": "z"}}).toMatchObject({"t": {"x": {"r": "r"}}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": [3, 4, 5], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": [3, 4, 5]})",
+ "name": "{pass: true} expect({"a": [3, 4, 5], "b": "b"}).toMatchObject({"a": [3, 4, 5]})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": [3, 4, 5, \\"v\\"], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": [3, 4, 5, \\"v\\"]})",
+ "name": "{pass: true} expect({"a": [3, 4, 5, "v"], "b": "b"}).toMatchObject({"a": [3, 4, 5, "v"]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 1, \\"c\\": 2}).toMatchObject({\\"a\\": Any})",
+ "name": "{pass: true} expect({"a": 1, "c": 2}).toMatchObject({"a": Any})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": {\\"x\\": \\"x\\", \\"y\\": \\"y\\"}}).toMatchObject({\\"a\\": {\\"x\\": Any}})",
+ "name": "{pass: true} expect({"a": {"x": "x", "y": "y"}}).toMatchObject({"a": {"x": Any}})",
"result": "success",
"time": 0,
},
@@ -17287,25 +17313,25 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": 2015-11-30T00:00:00.000Z, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": 2015-11-30T00:00:00.000Z})",
+ "name": "{pass: true} expect({"a": 2015-11-30T00:00:00.000Z, "b": "b"}).toMatchObject({"a": 2015-11-30T00:00:00.000Z})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": null, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": null})",
+ "name": "{pass: true} expect({"a": null, "b": "b"}).toMatchObject({"a": null})",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": undefined, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": undefined})",
+ "name": "{pass: true} expect({"a": undefined, "b": "b"}).toMatchObject({"a": undefined})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": [{\\"a\\": \\"a\\", \\"b\\": \\"b\\"}]}).toMatchObject({\\"a\\": [{\\"a\\": \\"a\\"}]})",
+ "name": "{pass: true} expect({"a": [{"a": "a", "b": "b"}]}).toMatchObject({"a": [{"a": "a"}]})",
"result": "success",
"time": 1,
},
@@ -17317,7 +17343,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": undefined}).toMatchObject({\\"a\\": undefined})",
+ "name": "{pass: true} expect({"a": undefined}).toMatchObject({"a": undefined})",
"result": "success",
"time": 1,
},
@@ -17335,109 +17361,109 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect([Error: bar]).toMatchObject({\\"message\\": \\"bar\\"})",
+ "name": "{pass: true} expect([Error: bar]).toMatchObject({"message": "bar"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({}).toMatchObject({\\"a\\": undefined, \\"b\\": \\"b\\"})",
+ "name": "{pass: true} expect({}).toMatchObject({"a": undefined, "b": "b"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\"}).toMatchObject({\\"a\\": \\"b\\"})",
+ "name": "{pass: true} expect({"a": "b"}).toMatchObject({"a": "b"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\", Symbol(jest): \\"jest\\"}).toMatchObject({\\"a\\": \\"b\\", Symbol(jest): \\"jest\\"})",
+ "name": "{pass: true} expect({"a": "b", "c": "d", Symbol(jest): "jest"}).toMatchObject({"a": "b", Symbol(jest): "jest"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\", Symbol(jest): \\"jest\\"}).toMatchObject({\\"a\\": \\"b\\", \\"c\\": \\"d\\", Symbol(jest): \\"jest\\"})",
+ "name": "{pass: true} expect({"a": "b", "c": "d", Symbol(jest): "jest"}).toMatchObject({"a": "b", "c": "d", Symbol(jest): "jest"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({}).toMatchObject({\\"a\\": undefined, \\"b\\": \\"b\\", \\"c\\": \\"c\\"})",
+ "name": "{pass: true} expect({}).toMatchObject({"a": undefined, "b": "b", "c": "c"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({}).toMatchObject({\\"d\\": 4})",
+ "name": "{pass: true} expect({}).toMatchObject({"d": 4})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: true} expect({\\"a\\": \\"b\\", \\"toString\\": [Function toString]}).toMatchObject({\\"toString\\": Any})",
+ "name": "{pass: true} expect({"a": "b", "toString": [Function toString]}).toMatchObject({"toString": Any})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\"}).toMatchObject({\\"e\\": \\"b\\"})",
+ "name": "{pass: false} expect({"a": "b", "c": "d"}).toMatchObject({"e": "b"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\"}).toMatchObject({\\"a\\": \\"b!\\", \\"c\\": \\"d\\"})",
+ "name": "{pass: false} expect({"a": "b", "c": "d"}).toMatchObject({"a": "b!", "c": "d"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"a\\", \\"c\\": \\"d\\"}).toMatchObject({\\"a\\": Any})",
+ "name": "{pass: false} expect({"a": "a", "c": "d"}).toMatchObject({"a": Any})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\", \\"t\\": {\\"x\\": {\\"r\\": \\"r\\"}, \\"z\\": \\"z\\"}}).toMatchObject({\\"a\\": \\"b\\", \\"t\\": {\\"z\\": [3]}})",
+ "name": "{pass: false} expect({"a": "b", "t": {"x": {"r": "r"}, "z": "z"}}).toMatchObject({"a": "b", "t": {"z": [3]}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\", \\"t\\": {\\"x\\": {\\"r\\": \\"r\\"}, \\"z\\": \\"z\\"}}).toMatchObject({\\"t\\": {\\"l\\": {\\"r\\": \\"r\\"}}})",
+ "name": "{pass: false} expect({"a": "b", "t": {"x": {"r": "r"}, "z": "z"}}).toMatchObject({"t": {"l": {"r": "r"}}})",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [3, 4, 5], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": [3, 4, 5, 6]})",
+ "name": "{pass: false} expect({"a": [3, 4, 5], "b": "b"}).toMatchObject({"a": [3, 4, 5, 6]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [3, 4, 5], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": [3, 4]})",
+ "name": "{pass: false} expect({"a": [3, 4, 5], "b": "b"}).toMatchObject({"a": [3, 4]})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [3, 4, \\"v\\"], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": [\\"v\\"]})",
+ "name": "{pass: false} expect({"a": [3, 4, "v"], "b": "b"}).toMatchObject({"a": ["v"]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [3, 4, 5], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": {\\"b\\": 4}})",
+ "name": "{pass: false} expect({"a": [3, 4, 5], "b": "b"}).toMatchObject({"a": {"b": 4}})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [3, 4, 5], \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": {\\"b\\": Any}})",
+ "name": "{pass: false} expect({"a": [3, 4, 5], "b": "b"}).toMatchObject({"a": {"b": Any}})",
"result": "success",
"time": 1,
},
@@ -17467,43 +17493,43 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 2015-11-30T00:00:00.000Z, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": 2015-10-10T00:00:00.000Z})",
+ "name": "{pass: false} expect({"a": 2015-11-30T00:00:00.000Z, "b": "b"}).toMatchObject({"a": 2015-10-10T00:00:00.000Z})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": null, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": \\"4\\"})",
+ "name": "{pass: false} expect({"a": null, "b": "b"}).toMatchObject({"a": "4"})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": null, \\"b\\": \\"b\\"}).toMatchObject({\\"a\\": undefined})",
+ "name": "{pass: false} expect({"a": null, "b": "b"}).toMatchObject({"a": undefined})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": undefined}).toMatchObject({\\"a\\": null})",
+ "name": "{pass: false} expect({"a": undefined}).toMatchObject({"a": null})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": [{\\"a\\": \\"a\\", \\"b\\": \\"b\\"}]}).toMatchObject({\\"a\\": [{\\"a\\": \\"c\\"}]})",
+ "name": "{pass: false} expect({"a": [{"a": "a", "b": "b"}]}).toMatchObject({"a": [{"a": "c"}]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": 1, \\"b\\": 1, \\"c\\": 1, \\"d\\": {\\"e\\": {\\"f\\": 555}}}).toMatchObject({\\"d\\": {\\"e\\": {\\"f\\": 222}}})",
+ "name": "{pass: false} expect({"a": 1, "b": 1, "c": 1, "d": {"e": {"f": 555}}}).toMatchObject({"d": {"e": {"f": 222}}})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({}).toMatchObject({\\"a\\": undefined})",
+ "name": "{pass: false} expect({}).toMatchObject({"a": undefined})",
"result": "success",
"time": 0,
},
@@ -17527,19 +17553,19 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\"}).toMatchObject({\\"c\\": \\"d\\"})",
+ "name": "{pass: false} expect({"a": "b"}).toMatchObject({"c": "d"})",
"result": "success",
"time": 4,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\", \\"c\\": \\"d\\", Symbol(jest): \\"jest\\"}).toMatchObject({\\"a\\": \\"c\\", Symbol(jest): Any})",
+ "name": "{pass: false} expect({"a": "b", "c": "d", Symbol(jest): "jest"}).toMatchObject({"a": "c", Symbol(jest): Any})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "{pass: false} expect({\\"a\\": \\"b\\"}).toMatchObject({\\"toString\\": Any})",
+ "name": "{pass: false} expect({"a": "b"}).toMatchObject({"toString": Any})",
"result": "success",
"time": 0,
},
@@ -17557,7 +17583,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "throws expect(\\"44\\").toMatchObject({})",
+ "name": "throws expect("44").toMatchObject({})",
"result": "success",
"time": 0,
},
@@ -17587,7 +17613,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "throws expect({}).toMatchObject(\\"some string\\")",
+ "name": "throws expect({}).toMatchObject("some string")",
"result": "success",
"time": 0,
},
@@ -17616,10 +17642,10 @@ Is good for you.\\")",
"totalTime": 862,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "understands dependencies using jest.requireActual",
@@ -17633,10 +17659,10 @@ Is good for you.\\")",
"totalTime": 1665,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Watch mode flows",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Correctly passing test path pattern",
@@ -18029,43 +18055,43 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"o\\" runs test in \\"only changed files\\" mode",
+ "name": "Pressing "o" runs test in "only changed files" mode",
"result": "success",
"time": 69,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"a\\" runs test in \\"watch all\\" mode",
+ "name": "Pressing "a" runs test in "watch all" mode",
"result": "success",
"time": 62,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"ENTER\\" reruns the tests",
+ "name": "Pressing "ENTER" reruns the tests",
"result": "success",
"time": 108,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"t\\" reruns the tests in \\"test name pattern\\" mode",
+ "name": "Pressing "t" reruns the tests in "test name pattern" mode",
"result": "success",
"time": 93,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"p\\" reruns the tests in \\"filename pattern\\" mode",
+ "name": "Pressing "p" reruns the tests in "filename pattern" mode",
"result": "success",
"time": 80,
},
TestCaseResult {
"error": undefined,
- "name": "Can combine \\"p\\" and \\"t\\" filters",
+ "name": "Can combine "p" and "t" filters",
"result": "success",
"time": 68,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"u\\" reruns the tests in \\"update snapshot\\" mode",
+ "name": "Pressing "u" reruns the tests in "update snapshot" mode",
"result": "success",
"time": 67,
},
@@ -18077,7 +18103,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "shows the correct usage for the f key in \\"only failed tests\\" mode",
+ "name": "shows the correct usage for the f key in "only failed tests" mode",
"result": "success",
"time": 62,
},
@@ -18085,7 +18111,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Watch mode flows when dealing with potential watch plugin key conflicts",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "forbids WatchPlugins overriding reserved internal plugins",
@@ -18129,10 +18155,10 @@ Is good for you.\\")",
"totalTime": 6755,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "transitive dependencies",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mocks a manually mocked and mapped module",
@@ -18176,10 +18202,10 @@ Is good for you.\\")",
"totalTime": 2366,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Watch mode flows with changed files",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should correct require new files without legacy cache",
@@ -18193,10 +18219,10 @@ Is good for you.\\")",
"totalTime": 1514,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "HasteMap",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exports constants",
@@ -18301,7 +18327,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "throws on duplicate module ids if \\"throwOnModuleCollision\\" is set to true",
+ "name": "throws on duplicate module ids if "throwOnModuleCollision" is set to true",
"result": "success",
"time": 5,
},
@@ -18387,7 +18413,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "HasteMap builds a haste map on a fresh cache with SHA-1s",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses watchman: false",
@@ -18404,7 +18430,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "HasteMap duplicate modules",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "recovers when a duplicate file is deleted",
@@ -18427,7 +18453,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "HasteMap file system changes processing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "provides a new set of hasteHS and moduleMap",
@@ -18462,7 +18488,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "HasteMap file system changes processing recovery from duplicate module IDs",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "recovers when the oldest version of the duplicates is fixed",
@@ -18488,10 +18514,10 @@ Is good for you.\\")",
"totalTime": 1145,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "code coverage for Handlebars",
@@ -18505,10 +18531,10 @@ Is good for you.\\")",
"totalTime": 1873,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not crash when expect involving a DOM node fails",
@@ -18522,10 +18548,10 @@ Is good for you.\\")",
"totalTime": 1407,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "check is not leaking memory",
@@ -18539,10 +18565,10 @@ Is good for you.\\")",
"totalTime": 1744,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolve platform modules",
@@ -18556,10 +18582,10 @@ Is good for you.\\")",
"totalTime": 1863,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Snapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "stores new snapshots on the first run",
@@ -18588,7 +18614,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Snapshot Validation",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not save snapshots in CI mode by default",
@@ -18626,10 +18652,10 @@ Is good for you.\\")",
"totalTime": 13899,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mocks modules by default when using automocking",
@@ -18694,7 +18720,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "resetModules",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets all the modules",
@@ -18705,7 +18731,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "isolateModules",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "keeps it's registry isolated from global one",
@@ -18740,7 +18766,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "isolateModules can use isolateModules from a beforeEach block",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "can use the required module from beforeEach and re-require it",
@@ -18754,10 +18780,10 @@ Is good for you.\\")",
"totalTime": 1223,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireMock",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses manual mocks before attempting to automock",
@@ -18843,10 +18869,10 @@ Is good for you.\\")",
"totalTime": 962,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses NODE_PATH to find modules",
@@ -18878,10 +18904,10 @@ Is good for you.\\")",
"totalTime": 1088,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime require.resolve",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolves a module path",
@@ -18898,7 +18924,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Runtime require.resolve with the jest-resolve-outside-vm-option",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "forwards to the real Node require in an internal context",
@@ -18924,10 +18950,10 @@ Is good for you.\\")",
"totalTime": 707,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "complains if the value is a primitive",
@@ -18971,10 +18997,10 @@ Is good for you.\\")",
"totalTime": 986,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "async jasmine with pending during test",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not work on jest-circus",
@@ -18994,10 +19020,10 @@ Is good for you.\\")",
"totalTime": 72,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "config for reporters supports \`default\`",
@@ -19053,10 +19079,10 @@ Is good for you.\\")",
"totalTime": 520,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "expect works correctly with RegExps created inside a VM",
@@ -19070,10 +19096,10 @@ Is good for you.\\")",
"totalTime": 1527,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireActual",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "requires node module when manual mock exists",
@@ -19093,10 +19119,10 @@ Is good for you.\\")",
"totalTime": 478,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime require.cache",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "require.cache returns loaded module list as native Nodejs require does",
@@ -19116,10 +19142,10 @@ Is good for you.\\")",
"totalTime": 454,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses configured moduleDirectories",
@@ -19151,10 +19177,10 @@ Is good for you.\\")",
"totalTime": 525,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime internalModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "loads modules and applies transforms",
@@ -19186,10 +19212,10 @@ Is good for you.\\")",
"totalTime": 727,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "check",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns true if the arguments are valid",
@@ -19240,31 +19266,31 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "allows using \\"js\\" file for --config option",
+ "name": "allows using "js" file for --config option",
"result": "success",
"time": 10,
},
TestCaseResult {
"error": undefined,
- "name": "allows using \\"ts\\" file for --config option",
+ "name": "allows using "ts" file for --config option",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "allows using \\"mjs\\" file for --config option",
+ "name": "allows using "mjs" file for --config option",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "allows using \\"cjs\\" file for --config option",
+ "name": "allows using "cjs" file for --config option",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "allows using \\"json\\" file for --config option",
+ "name": "allows using "json" file for --config option",
"result": "success",
"time": 0,
},
@@ -19290,7 +19316,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "buildArgv",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return only camelcased args",
@@ -19304,10 +19330,10 @@ Is good for you.\\")",
"totalTime": 345,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime jest.spyOn",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls the original function",
@@ -19318,7 +19344,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Runtime jest.spyOnProperty",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls the original function",
@@ -19332,10 +19358,10 @@ Is good for you.\\")",
"totalTime": 521,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolves no dependencies for non-existent path",
@@ -19409,10 +19435,10 @@ Is good for you.\\")",
"totalTime": 666,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should exclude jasmine from stack trace for Unix paths.",
@@ -19465,7 +19491,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "formatStackTrace",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints code frame and stacktrace",
@@ -19491,10 +19517,10 @@ Is good for you.\\")",
"totalTime": 205,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "different types",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "'1' and 'a'",
@@ -19529,10 +19555,10 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "no visual difference",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "'\\"a\\"' and '\\"a\\"'",
+ "name": "'"a"' and '"a"'",
"result": "success",
"time": 2,
},
@@ -19598,13 +19624,13 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "'{\\"a\\":1}' and '{\\"a\\":1}'",
+ "name": "'{"a":1}' and '{"a":1}'",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "'{\\"a\\":{\\"b\\":5}}' and '{\\"a\\":{\\"b\\":5}}'",
+ "name": "'{"a":{"b":5}}' and '{"a":{"b":5}}'",
"result": "success",
"time": 0,
},
@@ -19624,7 +19650,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "oneline strings",
@@ -19659,7 +19685,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "falls back to not call toJSON if serialization has no differences",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "but then objects have differences",
@@ -19676,7 +19702,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "falls back to not call toJSON if it throws",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "and then objects have differences",
@@ -19693,7 +19719,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "multiline strings",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19710,7 +19736,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "objects",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19727,7 +19753,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "multiline string non-snapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19744,7 +19770,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "multiline string snapshot",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19761,7 +19787,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "React elements",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19778,7 +19804,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "multiline string as value of object property (non-snapshot)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19795,7 +19821,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "multiline string as value of object property (snapshot)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19812,7 +19838,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in JavaScript structures from less to more",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19829,7 +19855,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in JavaScript structures from more to less",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19846,7 +19872,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "color of text",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(expanded)",
@@ -19863,7 +19889,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in React elements (non-snapshot) from less to more",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19880,7 +19906,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in React elements (non-snapshot) from more to less",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19897,7 +19923,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in React elements (snapshot) from less to more",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19914,7 +19940,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "indentation in React elements (snapshot) from more to less",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19931,7 +19957,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "outer React element (non-snapshot) from less to more",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19948,7 +19974,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "outer React element (non-snapshot) from more to less",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19965,7 +19991,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "trailing newline in multiline string not enclosed in quotes from less to more",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19982,7 +20008,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "trailing newline in multiline string not enclosed in quotes from more to less",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "(unexpanded)",
@@ -19999,7 +20025,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "context",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "number of lines: -1 (5 default)",
@@ -20040,7 +20066,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "diffLinesUnified edge cases",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a empty string b empty string",
@@ -20069,7 +20095,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "diffLinesUnified2 edge cases",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a empty string b empty string",
@@ -20098,7 +20124,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "diffLinesUnified2 edge cases lengths not equal",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a",
@@ -20115,7 +20141,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "diffStringsUnified edge cases",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "empty both a and b",
@@ -20156,7 +20182,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options 7980",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diff",
@@ -20173,7 +20199,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options change indicators",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diff",
@@ -20184,7 +20210,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options change color",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diffStringsUnified",
@@ -20201,7 +20227,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options common",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diff",
@@ -20218,7 +20244,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options includeChangeCounts false",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diffLinesUnified",
@@ -20235,7 +20261,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options includeChangeCounts true padding",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diffLinesUnified a has 2 digits",
@@ -20258,7 +20284,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options omitAnnotationLines true",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diff",
@@ -20281,7 +20307,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options trailingSpaceFormatter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diffDefault default no color",
@@ -20304,7 +20330,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "options emptyFirstOrLastLinePlaceholder default empty string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "diffDefault",
@@ -20324,13 +20350,13 @@ Is good for you.\\")",
"totalTime": 625,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Watch mode flows",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"T\\" enters pattern mode",
+ "name": "Pressing "T" enters pattern mode",
"result": "success",
"time": 8,
},
@@ -20341,10 +20367,10 @@ Is good for you.\\")",
"totalTime": 246,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toBeCalled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20385,7 +20411,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveBeenCalled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20426,7 +20452,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toBeCalledTimes",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": ".not works only on spies or jest.fn",
@@ -20473,7 +20499,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveBeenCalledTimes",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": ".not works only on spies or jest.fn",
@@ -20520,7 +20546,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "lastCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20597,7 +20623,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveBeenLastCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20674,7 +20700,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "nthCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20763,7 +20789,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveBeenNthCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20852,7 +20878,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toBeCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -20929,7 +20955,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveBeenCalledWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21006,7 +21032,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toReturn",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": ".not works only on jest.fn",
@@ -21083,7 +21109,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveReturned",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": ".not works only on jest.fn",
@@ -21160,7 +21186,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toReturnTimes",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throw matcher error if received is spy",
@@ -21231,7 +21257,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveReturnedTimes",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throw matcher error if received is spy",
@@ -21302,7 +21328,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "lastReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21385,7 +21411,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "lastReturnedWith lastReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with three calls",
@@ -21402,7 +21428,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveLastReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21485,7 +21511,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveLastReturnedWith lastReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with three calls",
@@ -21502,7 +21528,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "nthReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21585,7 +21611,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "nthReturnedWith nthReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with three calls",
@@ -21632,7 +21658,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveNthReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21715,7 +21741,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveNthReturnedWith nthReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with three calls",
@@ -21762,7 +21788,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toReturnWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21845,7 +21871,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toReturnWith returnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with more calls than the limit",
@@ -21862,7 +21888,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveReturnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works only on spies or jest.fn",
@@ -21945,7 +21971,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toHaveReturnedWith returnedWith",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with more calls than the limit",
@@ -21965,10 +21991,10 @@ Is good for you.\\")",
"totalTime": 395,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "gets hg SCM roots and dedupes them",
@@ -22007,12 +22033,12 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "handles a bad revision for \\"changedSince\\", for git",
+ "name": "handles a bad revision for "changedSince", for git",
"result": "success",
"time": 878,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: abort: empty revision range
at makeError (/home/dorny/dorny/jest/node_modules/execa/lib/error.js:59:11)
at handlePromise (/home/dorny/dorny/jest/node_modules/execa/index.js:114:26)
@@ -22039,7 +22065,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "handles a bad revision for \\"changedSince\\", for hg",
+ "name": "handles a bad revision for "changedSince", for hg",
"result": "success",
"time": 949,
},
@@ -22050,10 +22076,10 @@ Is good for you.\\")",
"totalTime": 9045,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "stringify()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "[]",
@@ -22098,7 +22124,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "\\"abc\\"",
+ "name": ""abc"",
"result": "success",
"time": 0,
},
@@ -22128,7 +22154,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "/ab\\\\.c/gi",
+ "name": "/ab\\.c/gi",
"result": "success",
"time": 0,
},
@@ -22172,7 +22198,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "ensureNumbers()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "dont throw error when variables are numbers",
@@ -22195,7 +22221,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "ensureNumbers() with options",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "promise empty isNot false received",
@@ -22236,7 +22262,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "ensureNoExpected()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "dont throw error when undefined",
@@ -22259,7 +22285,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "diff",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "forwards to jest-diff",
@@ -22288,7 +22314,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "pluralize()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "one",
@@ -22311,7 +22337,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getLabelPrinter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "0 args",
@@ -22358,7 +22384,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "matcherHint",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "expectedColor",
@@ -22384,19 +22410,19 @@ Is good for you.\\")",
"totalTime": 391,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Watch mode flows",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"P\\" enters pattern mode",
+ "name": "Pressing "P" enters pattern mode",
"result": "success",
"time": 7,
},
TestCaseResult {
"error": undefined,
- "name": "Pressing \\"c\\" clears the filters",
+ "name": "Pressing "c" clears the filters",
"result": "success",
"time": 1,
},
@@ -22407,10 +22433,10 @@ Is good for you.\\")",
"totalTime": 165,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime createMockFromModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not cause side effects in the rest of the module system when generating a mock",
@@ -22427,7 +22453,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Runtime",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "creates mock objects in the right environment",
@@ -22441,10 +22467,10 @@ Is good for you.\\")",
"totalTime": 606,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireModule",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "emulates a node stack trace during module load",
@@ -22464,10 +22490,10 @@ Is good for you.\\")",
"totalTime": 497,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "watchman crawler and node crawler both include dotfiles",
@@ -22481,10 +22507,10 @@ Is good for you.\\")",
"totalTime": 337,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "watchman watch",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns a list of all files when there are no clocks",
@@ -22540,10 +22566,10 @@ Is good for you.\\")",
"totalTime": 153,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "FakeTimers construction",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "installs setTimeout mock",
@@ -22596,7 +22622,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runAllTicks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all ticks, in order",
@@ -22655,7 +22681,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runAllTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all timers in order",
@@ -22708,7 +22734,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers advanceTimersByTime",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs timers in order",
@@ -22731,7 +22757,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers advanceTimersToNextTimer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs timers in order",
@@ -22760,7 +22786,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers reset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets all pending setTimeouts",
@@ -22789,7 +22815,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runOnlyPendingTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all timers in order",
@@ -22806,7 +22832,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runWithRealTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "executes callback with native timers",
@@ -22829,7 +22855,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers useRealTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets native timer APIs",
@@ -22852,7 +22878,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers useFakeTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets mock timer APIs",
@@ -22875,7 +22901,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers getTimerCount",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the correct count",
@@ -22901,10 +22927,10 @@ Is good for you.\\")",
"totalTime": 302,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime jest.fn",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "creates mock functions",
@@ -22921,7 +22947,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Runtime jest.isMockFunction",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "recognizes a mocked function",
@@ -22932,7 +22958,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Runtime jest.clearAllMocks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "clears all mocks",
@@ -22946,10 +22972,10 @@ Is good for you.\\")",
"totalTime": 479,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toStrictEqual",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be reflexive",
@@ -22966,7 +22992,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "toStrictEqual on node >=9",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be equivalent to Node deepStrictEqual",
@@ -22980,10 +23006,10 @@ Is good for you.\\")",
"totalTime": 394,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be reflexive",
@@ -23033,10 +23059,10 @@ Is good for you.\\")",
"totalTime": 357,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "FakeTimers construction",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "installs setTimeout mock",
@@ -23083,7 +23109,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runAllTicks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all ticks, in order",
@@ -23112,7 +23138,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runAllTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all timers in order",
@@ -23165,7 +23191,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers advanceTimersByTime",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs timers in order",
@@ -23182,7 +23208,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers advanceTimersToNextTimer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs timers in order",
@@ -23211,7 +23237,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers reset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets all pending setTimeouts",
@@ -23240,7 +23266,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers runOnlyPendingTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs all timers in order",
@@ -23257,7 +23283,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers useRealTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets native timer APIs",
@@ -23280,7 +23306,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers useFakeTimers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resets mock timer APIs",
@@ -23303,7 +23329,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "FakeTimers getTimerCount",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the correct count",
@@ -23329,10 +23355,10 @@ Is good for you.\\")",
"totalTime": 317,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toContain",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should always find the value when inside the array",
@@ -23352,10 +23378,10 @@ Is good for you.\\")",
"totalTime": 236,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime wrapCodeInModuleWrapper",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "generates the correct args for the module wrapper",
@@ -23364,7 +23390,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "injects \\"extra globals\\"",
+ "name": "injects "extra globals"",
"result": "success",
"time": 43,
},
@@ -23375,10 +23401,10 @@ Is good for you.\\")",
"totalTime": 263,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "toContainEqual",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should always find the value when inside the array",
@@ -23398,10 +23424,10 @@ Is good for you.\\")",
"totalTime": 287,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "invalid arg length",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is not a number",
@@ -23442,7 +23468,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "invalid arg callback",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "null is not a function",
@@ -23459,7 +23485,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "input callback encapsulates comparison zero and negative zero",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "are not common according to Object.is method",
@@ -23476,7 +23502,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "input callback encapsulates comparison Not a Number",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is common according to Object.is method",
@@ -23493,7 +23519,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "input callback encapsulates sequences",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "arrays of strings",
@@ -23516,7 +23542,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "no common items negative zero is equivalent to zero for length",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "of a",
@@ -23539,7 +23565,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "no common items",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a empty and b empty",
@@ -23562,7 +23588,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "no common items a non-empty and b non-empty",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "baDeltaLength 0 even",
@@ -23591,7 +23617,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "only common items",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "length 1",
@@ -23608,7 +23634,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "all common items outside",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "preceding changes",
@@ -23631,7 +23657,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "some common items inside and outside",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "preceding changes adjacent to common in both sequences",
@@ -23648,7 +23674,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "all common items inside non-recursive",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "move from start to end relative to change",
@@ -23689,7 +23715,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "all common items inside recursive",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prev reverse at depth 1 and preceding at depth 2",
@@ -23736,7 +23762,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "common substrings",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "progress",
@@ -23762,10 +23788,10 @@ Is good for you.\\")",
"totalTime": 195,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": ".getType()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "null",
@@ -23857,10 +23883,10 @@ Is good for you.\\")",
"totalTime": 45,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
- "name": "init project with package.json and no jest config all questions answered with answer: \\"No\\"",
- "tests": Array [
+ "name": "init project with package.json and no jest config all questions answered with answer: "No"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the default configuration (an empty config)",
@@ -23876,8 +23902,8 @@ Is good for you.\\")",
],
},
TestGroupResult {
- "name": "init project with package.json and no jest config some questions answered with answer: \\"Yes\\"",
- "tests": Array [
+ "name": "init project with package.json and no jest config some questions answered with answer: "Yes"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create configuration for {clearMocks: true}",
@@ -23892,25 +23918,25 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "should create configuration for {coverageProvider: \\"babel\\"}",
+ "name": "should create configuration for {coverageProvider: "babel"}",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should create configuration for {coverageProvider: \\"v8\\"}",
+ "name": "should create configuration for {coverageProvider: "v8"}",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should create configuration for {environment: \\"jsdom\\"}",
+ "name": "should create configuration for {environment: "jsdom"}",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should create configuration for {environment: \\"node\\"}",
+ "name": "should create configuration for {environment: "node"}",
"result": "success",
"time": 1,
},
@@ -23924,7 +23950,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init no package json",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw an error if there is no package.json file",
@@ -23935,16 +23961,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has-jest-config-file-js ask the user whether to override config or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 0,
},
@@ -23952,16 +23978,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has-jest-config-file-ts ask the user whether to override config or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 0,
},
@@ -23969,16 +23995,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has-jest-config-file-mjs ask the user whether to override config or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 1,
},
@@ -23986,16 +24012,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has-jest-config-file-cjs ask the user whether to override config or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 1,
},
@@ -24003,16 +24029,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has-jest-config-file-json ask the user whether to override config or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 0,
},
@@ -24020,16 +24046,16 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init project using jest.config.ts ask the user whether he wants to use Typescript or not",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"Yes\\"",
+ "name": "user answered with "Yes"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "user answered with \\"No\\"",
+ "name": "user answered with "No"",
"result": "success",
"time": 1,
},
@@ -24037,7 +24063,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "init has jest config in package.json",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should ask the user whether to override config or not",
@@ -24047,11 +24073,11 @@ Is good for you.\\")",
],
},
TestGroupResult {
- "name": "init already has \\"jest\\" in packageJson.scripts.test",
- "tests": Array [
+ "name": "init already has "jest" in packageJson.scripts.test",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should not ask \\"test script question\\"",
+ "name": "should not ask "test script question"",
"result": "success",
"time": 0,
},
@@ -24062,10 +24088,10 @@ Is good for you.\\")",
"totalTime": 119,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime requireModule with no extension",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws error pointing out file with extension",
@@ -24079,10 +24105,10 @@ Is good for you.\\")",
"totalTime": 261,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the same value for primitive or function values",
@@ -24162,10 +24188,10 @@ Is good for you.\\")",
"totalTime": 86,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with jest.config.js",
@@ -24179,10 +24205,10 @@ Is good for you.\\")",
"totalTime": 138,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "creates a process object that looks like the original one",
@@ -24191,7 +24217,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "fakes require(\\"process\\") so it is equal to \\"global.process\\"",
+ "name": "fakes require("process") so it is equal to "global.process"",
"result": "success",
"time": 0,
},
@@ -24214,10 +24240,10 @@ Is good for you.\\")",
"totalTime": 81,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "instruments files",
@@ -24231,10 +24257,10 @@ Is good for you.\\")",
"totalTime": 275,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "runJest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "when watch is set then exit process",
@@ -24254,10 +24280,10 @@ Is good for you.\\")",
"totalTime": 261,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Replaceable constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "init with object",
@@ -24286,7 +24312,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Replaceable get",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "get object item",
@@ -24309,7 +24335,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Replaceable set",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "set object item",
@@ -24332,7 +24358,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Replaceable forEach",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "object forEach",
@@ -24361,7 +24387,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "Replaceable isReplaceable",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true if two object types equal and support",
@@ -24387,10 +24413,10 @@ Is good for you.\\")",
"totalTime": 111,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to milliseconds",
@@ -24464,10 +24490,10 @@ Is good for you.\\")",
"totalTime": 82,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Jest Worker Process Integration",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls a single method from the worker",
@@ -24511,10 +24537,10 @@ Is good for you.\\")",
"totalTime": 114,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "worker",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "parses JavaScript files and extracts module information",
@@ -24564,10 +24590,10 @@ Is good for you.\\")",
"totalTime": 100,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "printDiffOrStringify",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "expected is empty and received is single line",
@@ -24614,7 +24640,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "printDiffOrStringify MAX_DIFF_STRING_LENGTH",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "both are less",
@@ -24637,7 +24663,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "printDiffOrStringify asymmetricMatcher",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "minimal test",
@@ -24711,10 +24737,10 @@ Is good for you.\\")",
"totalTime": 114,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getPath()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "property exists",
@@ -24761,22 +24787,22 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getObjectSubset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "expect(getObjectSubset({\\"a\\": \\"b\\", \\"c\\": \\"d\\"}, {\\"a\\": \\"d\\"})).toEqual({\\"a\\": \\"b\\"})",
+ "name": "expect(getObjectSubset({"a": "b", "c": "d"}, {"a": "d"})).toEqual({"a": "b"})",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "expect(getObjectSubset({\\"a\\": [1, 2], \\"b\\": \\"b\\"}, {\\"a\\": [3, 4]})).toEqual({\\"a\\": [1, 2]})",
+ "name": "expect(getObjectSubset({"a": [1, 2], "b": "b"}, {"a": [3, 4]})).toEqual({"a": [1, 2]})",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "expect(getObjectSubset([{\\"a\\": \\"b\\", \\"c\\": \\"d\\"}], [{\\"a\\": \\"z\\"}])).toEqual([{\\"a\\": \\"b\\"}])",
+ "name": "expect(getObjectSubset([{"a": "b", "c": "d"}], [{"a": "z"}])).toEqual([{"a": "b"}])",
"result": "success",
"time": 1,
},
@@ -24788,7 +24814,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "expect(getObjectSubset({\\"a\\": [1]}, {\\"a\\": [1, 2]})).toEqual({\\"a\\": [1]})",
+ "name": "expect(getObjectSubset({"a": [1]}, {"a": [1, 2]})).toEqual({"a": [1]})",
"result": "success",
"time": 0,
},
@@ -24802,7 +24828,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getObjectSubset returns the object instance if the subset has no extra properties",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Date",
@@ -24813,7 +24839,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getObjectSubset returns the subset instance if its property values are equal",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Object",
@@ -24824,7 +24850,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getObjectSubset returns the subset instance if its property values are equal Uint8Array",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "expected",
@@ -24841,7 +24867,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "getObjectSubset calculating subsets of objects with circular references",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "simple circular references",
@@ -24858,7 +24884,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "emptyObject()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "matches an empty object",
@@ -24881,7 +24907,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "subsetEquality()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "matching object returns true",
@@ -24916,7 +24942,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "subsetEquality() matching subsets with circular references",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "simple circular references",
@@ -24939,7 +24965,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "iterableEquality",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns true when given circular iterators",
@@ -25013,10 +25039,10 @@ Is good for you.\\")",
"totalTime": 147,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the same value for primitive or function values",
@@ -25090,10 +25116,10 @@ Is good for you.\\")",
"totalTime": 49,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "isError",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not assume objects are errors",
@@ -25125,10 +25151,10 @@ Is good for you.\\")",
"totalTime": 43,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": ".isPrimitive()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns true when given primitive value of: null",
@@ -25227,7 +25253,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "returns false when given non primitive value of: \\"2021-01-24T19:22:19.272Z\\"",
+ "name": "returns false when given non primitive value of: "2021-01-24T19:22:19.272Z"",
"result": "success",
"time": 0,
},
@@ -25244,10 +25270,10 @@ Is good for you.\\")",
"totalTime": 36,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "node crawler",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "crawls for files based on patterns",
@@ -25280,7 +25306,7 @@ Is good for you.\\")",
},
TestCaseResult {
"error": undefined,
- "name": "uses node fs APIs if \\"forceNodeFilesystemAPI\\" is set to true, regardless of platform",
+ "name": "uses node fs APIs if "forceNodeFilesystemAPI" is set to true, regardless of platform",
"result": "success",
"time": 6,
},
@@ -25300,7 +25326,7 @@ Is good for you.\\")",
},
TestGroupResult {
"name": "node crawler readdir withFileTypes support",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls lstat for directories and symlinks if readdir withFileTypes is not supported",
@@ -25320,13 +25346,13 @@ Is good for you.\\")",
"totalTime": 170,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "run for \\"onlyChanged\\" and \\"changedSince\\"",
+ "name": "run for "onlyChanged" and "changedSince"",
"result": "success",
"time": 1464,
},
@@ -25367,14 +25393,14 @@ Is good for you.\\")",
"time": 7023,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: expect(received).toMatch(expected)
-Expected pattern: /PASS __tests__(\\\\/|\\\\\\\\)file2.test.js/
-Received string: \\"·
+Expected pattern: /PASS __tests__(\\/|\\\\)file2.test.js/
+Received string: "·
● Test suite failed to run·
abort: empty revision range
-\\"
+"
at Object.toMatch (/home/dorny/dorny/jest/e2e/__tests__/onlyChanged.test.ts:360:18)
at Promise.then.completed (/home/dorny/dorny/jest/packages/jest-circus/build/utils.js:307:28)
at new Promise ()
@@ -25405,10 +25431,10 @@ Received string: \\"·
"totalTime": 22281,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "joinAlignedDiffsExpand",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "first line is empty common",
@@ -25419,7 +25445,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "joinAlignedDiffsNoExpand",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "patch 0 with context 1 and change at start and end",
@@ -25457,10 +25483,10 @@ Received string: \\"·
"totalTime": 44,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "SnapshotInteractiveMode",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is inactive at construction",
@@ -25546,10 +25572,10 @@ Received string: \\"·
"totalTime": 89,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime statics",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Runtime.createHasteMap passes correct ignore files to HasteMap",
@@ -25569,10 +25595,10 @@ Received string: \\"·
"totalTime": 162,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getAlignedDiffs lines",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "change preceding and following common",
@@ -25607,7 +25633,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "getAlignedDiffs newline",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "delete only",
@@ -25648,7 +25674,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "getAlignedDiffs substrings first",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "common when both current change lines are empty",
@@ -25677,7 +25703,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "getAlignedDiffs substrings middle",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is empty in delete between common",
@@ -25706,7 +25732,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "getAlignedDiffs substrings last",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is empty in delete at end",
@@ -25729,7 +25755,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "getAlignedDiffs strings",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "change at start and delete or insert at end",
@@ -25749,10 +25775,10 @@ Received string: \\"·
"totalTime": 72,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": ".assertions()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not throw",
@@ -25775,7 +25801,7 @@ Received string: \\"·
},
TestGroupResult {
"name": ".hasAssertions()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not throw if there is an assertion",
@@ -25801,10 +25827,10 @@ Received string: \\"·
"totalTime": 60,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getPlatformExtension",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should get platform ext",
@@ -25818,10 +25844,10 @@ Received string: \\"·
"totalTime": 35,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "isRegExpSupported",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true when passing valid regular expression",
@@ -25841,10 +25867,10 @@ Received string: \\"·
"totalTime": 31,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "stack trace points to correct location when using matchers",
@@ -25870,10 +25896,10 @@ Received string: \\"·
"totalTime": 69,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Jest Worker Integration",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls a single method from the worker",
@@ -25911,10 +25937,10 @@ Received string: \\"·
"totalTime": 62,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "is valid when it is a file inside roots",
@@ -25940,10 +25966,10 @@ Received string: \\"·
"totalTime": 166,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ErrorWithStack",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls Error.captureStackTrace with given callsite when capture exists",
@@ -25957,10 +25983,10 @@ Received string: \\"·
"totalTime": 41,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Returns true when running on interactive environment",
@@ -25980,10 +26006,10 @@ Received string: \\"·
"totalTime": 35,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "diffStringsRaw",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "one-line with cleanup",
@@ -26003,10 +26029,10 @@ Received string: \\"·
"totalTime": 55,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works like micromatch with only positive globs",
@@ -26038,10 +26064,10 @@ Received string: \\"·
"totalTime": 56,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouldRunInBand() - should return true for runInBand mode",
@@ -26121,10 +26147,10 @@ Received string: \\"·
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the passed object",
@@ -26144,10 +26170,10 @@ Received string: \\"·
"totalTime": 68,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getMockName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "extracts mock name from file path",
@@ -26161,10 +26187,10 @@ Received string: \\"·
"totalTime": 22,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "WorkerPool",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create a ChildProcessWorker and send to it",
@@ -26190,10 +26216,10 @@ Received string: \\"·
"totalTime": 51,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "dependencyExtractor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not extract dependencies inside comments",
@@ -26291,10 +26317,10 @@ Received string: \\"·
"totalTime": 56,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "normalizePathSep",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does nothing on posix",
@@ -26314,10 +26340,10 @@ Received string: \\"·
"totalTime": 35,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "fastPath.relative",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should get relative paths inside the root",
@@ -26340,7 +26366,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "fastPath.resolve",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should get the absolute path for paths inside the root",
@@ -26360,10 +26386,10 @@ Received string: \\"·
"totalTime": 29,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "FailedTestsCache",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should filter tests",
@@ -26377,10 +26403,10 @@ Received string: \\"·
"totalTime": 25,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "getNoTestsFoundMessage",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns correct message when monitoring only failures",
@@ -26418,10 +26444,10 @@ Received string: \\"·
"totalTime": 61,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Common globals",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "check process",
@@ -26435,10 +26461,10 @@ Received string: \\"·
"totalTime": 22,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints the jest version",
@@ -26464,10 +26490,10 @@ Received string: \\"·
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "not a Promise: ",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "undefined",
@@ -26494,7 +26520,7 @@ Received string: \\"·
},
TestCaseResult {
"error": undefined,
- "name": "\\"1337\\"",
+ "name": ""1337"",
"result": "success",
"time": 0,
},
@@ -26520,7 +26546,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a resolved Promise",
@@ -26540,10 +26566,10 @@ Received string: \\"·
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should remove jest config if exists",
@@ -26575,10 +26601,10 @@ Received string: \\"·
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Symbol in objects",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should compare objects with Symbol keys",
@@ -26604,10 +26630,10 @@ Received string: \\"·
"totalTime": 33,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "stub",
@@ -26621,10 +26647,10 @@ Received string: \\"·
"totalTime": 37,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "stub",
@@ -26638,10 +26664,10 @@ Received string: \\"·
"totalTime": 19,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "dummy-test",
@@ -26655,10 +26681,10 @@ Received string: \\"·
"totalTime": 31,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should copy dom element",
@@ -26678,10 +26704,10 @@ Received string: \\"·
"totalTime": 48,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "on node ^12.16.0 || >=13.7.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs TS test with native ESM",
@@ -26695,10 +26721,10 @@ Received string: \\"·
"totalTime": 956,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "maps code coverage against original source",
@@ -26712,10 +26738,10 @@ Received string: \\"·
"totalTime": 12701,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "globalTeardown is triggered once after all test suites",
@@ -26765,10 +26791,10 @@ Received string: \\"·
"totalTime": 11886,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "--listTests doesn't duplicate the test files",
@@ -26783,7 +26809,7 @@ Received string: \\"·
},
TestCaseResult {
"error": undefined,
- "name": "\\"No tests found\\" message for projects",
+ "name": ""No tests found" message for projects",
"result": "success",
"time": 977,
},
@@ -26845,7 +26871,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "doesn't bleed module file extensions resolution with multiple workers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "external config files",
@@ -26865,10 +26891,10 @@ Received string: \\"·
"totalTime": 16360,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "basic support",
@@ -26930,10 +26956,10 @@ Received string: \\"·
"totalTime": 17025,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints message about flag on slow tests",
@@ -26968,7 +26994,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "on node >=11.10.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not report ELD histograms",
@@ -26979,7 +27005,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "notify",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not report --notify flag",
@@ -26990,7 +27016,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "on node >=11",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not report timeouts using unref",
@@ -27004,10 +27030,10 @@ Received string: \\"·
"totalTime": 7528,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "not throwing Error objects",
@@ -27057,10 +27083,10 @@ Received string: \\"·
"totalTime": 10353,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "console printing",
@@ -27110,10 +27136,10 @@ Received string: \\"·
"totalTime": 8071.999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "basic test constructs",
@@ -27181,10 +27207,10 @@ Received string: \\"·
"totalTime": 7505,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exits with 1 if coverage threshold is not met",
@@ -27222,10 +27248,10 @@ Received string: \\"·
"totalTime": 4868,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints a message with path pattern at the end",
@@ -27239,10 +27265,10 @@ Received string: \\"·
"totalTime": 3076,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Dynamic test filtering",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses the default JSON option",
@@ -27292,10 +27318,10 @@ Received string: \\"·
"totalTime": 5422,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Given a config with two named projects, first-project and second-project when Jest is started with \`--selectProjects first-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the first project only",
@@ -27312,7 +27338,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two named projects, first-project and second-project when Jest is started with \`--selectProjects second-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the second project only",
@@ -27329,7 +27355,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two named projects, first-project and second-project when Jest is started with \`--selectProjects first-project second-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the first and second projects",
@@ -27346,7 +27372,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two named projects, first-project and second-project when Jest is started without providing \`--selectProjects\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the first and second projects",
@@ -27363,7 +27389,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two named projects, first-project and second-project when Jest is started with \`--selectProjects third-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails",
@@ -27380,7 +27406,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two projects, first-project and an unnamed project when Jest is started with \`--selectProjects first-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the first project only",
@@ -27403,7 +27429,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two projects, first-project and an unnamed project when Jest is started without providing \`--selectProjects\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs the tests in the first and second projects",
@@ -27420,7 +27446,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Given a config with two projects, first-project and an unnamed project when Jest is started with \`--selectProjects third-project\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails",
@@ -27446,10 +27472,10 @@ Received string: \\"·
"totalTime": 5236,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite without mock name, mock called",
@@ -27505,10 +27531,10 @@ Received string: \\"·
"totalTime": 6771,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Stack Trace",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints a stack trace for runtime errors",
@@ -27558,10 +27584,10 @@ Received string: \\"·
"totalTime": 4725,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with passing tests",
@@ -27611,10 +27637,10 @@ Received string: \\"·
"totalTime": 4721,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works fine when function throws error",
@@ -27646,10 +27672,10 @@ Received string: \\"·
"totalTime": 3562,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not re-run tests when only access time is modified",
@@ -27663,10 +27689,10 @@ Received string: \\"·
"totalTime": 4370,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "successfully runs the tests inside \`pnp/\`",
@@ -27680,10 +27706,10 @@ Received string: \\"·
"totalTime": 2715,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "errors if describe returns a Promise",
@@ -27709,10 +27735,10 @@ Received string: \\"·
"totalTime": 2389,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Wrong globals for environment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "print useful error for window",
@@ -27750,10 +27776,10 @@ Received string: \\"·
"totalTime": 3877,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "setupFilesAfterEnv",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "requires multiple setup files before each file in the suite",
@@ -27773,10 +27799,10 @@ Received string: \\"·
"totalTime": 1967,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "No tests are found",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails the test suite in standard situation",
@@ -27814,10 +27840,10 @@ Received string: \\"·
"totalTime": 2739,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with custom matchers",
@@ -27837,10 +27863,10 @@ Received string: \\"·
"totalTime": 1539,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "print correct error message with nested test definitions outside describe",
@@ -27872,10 +27898,10 @@ Received string: \\"·
"totalTime": 4641,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite with auto-clear",
@@ -27895,10 +27921,10 @@ Received string: \\"·
"totalTime": 1681,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exits the process after test are done but before timers complete",
@@ -27912,10 +27938,10 @@ Received string: \\"·
"totalTime": 2208,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exits with a specified code when test fail",
@@ -27935,10 +27961,10 @@ Received string: \\"·
"totalTime": 4476,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "",
@@ -27952,10 +27978,10 @@ Received string: \\"·
"totalTime": 967,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "run timers after resetAllMocks test",
@@ -27975,10 +28001,10 @@ Received string: \\"·
"totalTime": 1878,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "triggers setup/teardown hooks",
@@ -27992,10 +28018,10 @@ Received string: \\"·
"totalTime": 1493,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "show error message with matching files",
@@ -28015,10 +28041,10 @@ Received string: \\"·
"totalTime": 1263,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with nested event loops",
@@ -28032,10 +28058,10 @@ Received string: \\"·
"totalTime": 1422,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "no tests found message is redirected to stderr",
@@ -28049,10 +28075,10 @@ Received string: \\"·
"totalTime": 1352,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work without error",
@@ -28066,10 +28092,10 @@ Received string: \\"·
"totalTime": 1361,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "console printing",
@@ -28083,10 +28109,10 @@ Received string: \\"·
"totalTime": 1341,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs tests in only test.js and spec.js",
@@ -28100,10 +28126,10 @@ Received string: \\"·
"totalTime": 1008.9999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "\`require.main\` on using \`jest.isolateModules\` should not be undefined",
@@ -28117,10 +28143,10 @@ Received string: \\"·
"totalTime": 976,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "jest --clearCache",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "normal run results in cache directory being written",
@@ -28140,10 +28166,10 @@ Received string: \\"·
"totalTime": 1004,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runtime Internal Module Registry",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "correctly makes use of internal module registry when requiring modules",
@@ -28157,10 +28183,10 @@ Received string: \\"·
"totalTime": 1202,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "resolve node module",
@@ -28174,10 +28200,10 @@ Received string: \\"·
"totalTime": 943,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "setImmediate",
@@ -28191,10 +28217,10 @@ Received string: \\"·
"totalTime": 904,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "JSON Reporter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails the test suite if it contains no tests",
@@ -28208,10 +28234,10 @@ Received string: \\"·
"totalTime": 885,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite with test cases that contain malformed sourcemaps",
@@ -28225,13 +28251,13 @@ Received string: \\"·
"totalTime": 858,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "useRealTimers cancels \\"timers\\": \\"fake\\" for whole test file",
+ "name": "useRealTimers cancels "timers": "fake" for whole test file",
"result": "success",
"time": 986,
},
@@ -28242,10 +28268,10 @@ Received string: \\"·
"totalTime": 1018,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "can redefine global",
@@ -28259,10 +28285,10 @@ Received string: \\"·
"totalTime": 40,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "run Jest programmatically cjs",
@@ -28282,10 +28308,10 @@ Received string: \\"·
"totalTime": 575,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Symbol deletion",
@@ -28299,10 +28325,10 @@ Received string: \\"·
"totalTime": 49,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not warn when a mock file changes",
@@ -28316,10 +28342,10 @@ Received string: \\"·
"totalTime": 379,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "error when rootDir does not exist",
@@ -28351,10 +28377,10 @@ Received string: \\"·
"totalTime": 627,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "\`done()\` works properly in hooks",
@@ -28368,10 +28394,10 @@ Received string: \\"·
"totalTime": 855,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls asynchronous handleTestEvent in testEnvironment",
@@ -28385,10 +28411,10 @@ Received string: \\"·
"totalTime": 1507,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "moduleNameMapper wrong configuration",
@@ -28426,10 +28452,10 @@ Received string: \\"·
"totalTime": 5395,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Custom Reporters Integration",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "valid string format for adding reporters",
@@ -28491,10 +28517,10 @@ Received string: \\"·
"totalTime": 6553,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "--findRelatedTests flag",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs tests related to filename",
@@ -28532,10 +28558,10 @@ Received string: \\"·
"totalTime": 6230,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Environment override",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses jsdom when specified",
@@ -28564,7 +28590,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Environment equivalent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses jsdom",
@@ -28584,10 +28610,10 @@ Received string: \\"·
"totalTime": 5221,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "globalSetup is triggered once before all test suites",
@@ -28655,10 +28681,10 @@ Received string: \\"·
"totalTime": 13926,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints a warning if a worker is force exited",
@@ -28678,10 +28704,10 @@ Received string: \\"·
"totalTime": 4751,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "successfully runs the tests inside \`babel-plugin-jest-hoist/\`",
@@ -28695,10 +28721,10 @@ Received string: \\"·
"totalTime": 6249,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "code coverage for transform instrumented code",
@@ -28712,10 +28738,10 @@ Received string: \\"·
"totalTime": 5029,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not work on jest-circus",
@@ -28747,10 +28773,10 @@ Received string: \\"·
"totalTime": 71,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "successfully transpiles async",
@@ -28764,10 +28790,10 @@ Received string: \\"·
"totalTime": 4127,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works fine when function throws error",
@@ -28805,10 +28831,10 @@ Received string: \\"·
"totalTime": 3524,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with all statuses",
@@ -28846,10 +28872,10 @@ Received string: \\"·
"totalTime": 3573,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "onlyFailures flag works in non-watch mode",
@@ -28863,10 +28889,10 @@ Received string: \\"·
"totalTime": 2893,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports json preset",
@@ -28886,10 +28912,10 @@ Received string: \\"·
"totalTime": 1966,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with passing tests",
@@ -28909,10 +28935,10 @@ Received string: \\"·
"totalTime": 2732,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "babel-jest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs transpiled code",
@@ -28929,7 +28955,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "babel-jest ignored",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "tells user to match ignored files",
@@ -28940,7 +28966,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "babel-jest with manual transformer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs transpiled code",
@@ -28951,7 +28977,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "no babel-jest",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails with syntax error on flow types",
@@ -28968,7 +28994,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "custom transformer",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "proprocesses files",
@@ -28985,7 +29011,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "multiple-transformers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "transforms dependencies using specific transformers",
@@ -28996,7 +29022,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "ecmascript-modules-support",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs transpiled code",
@@ -29007,7 +29033,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "transformer-config",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs transpiled code",
@@ -29024,7 +29050,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "transformer caching",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not rerun transform within worker",
@@ -29035,7 +29061,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "transform-environment",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should transform the environment",
@@ -29046,7 +29072,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "transform-runner",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should transform runner",
@@ -29057,7 +29083,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "transform-testrunner",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should transform testRunner",
@@ -29071,10 +29097,10 @@ Received string: \\"·
"totalTime": 26740,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "chai assertion errors should display properly",
@@ -29088,10 +29114,10 @@ Received string: \\"·
"totalTime": 1902,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "processes stack traces and code frames with source maps with coverage",
@@ -29105,10 +29131,10 @@ Received string: \\"·
"totalTime": 2444,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "run prioritySequence first sync",
@@ -29134,10 +29160,10 @@ Received string: \\"·
"totalTime": 2757,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Correct BeforeAll run",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ensures the BeforeAll of ignored suite is not run",
@@ -29151,10 +29177,10 @@ Received string: \\"·
"totalTime": 958,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Fake promises",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be possible to resolve with fake timers using immediates",
@@ -29174,10 +29200,10 @@ Received string: \\"·
"totalTime": 1716,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "processes stack traces and code frames with source maps",
@@ -29191,10 +29217,10 @@ Received string: \\"·
"totalTime": 2185,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "successfully runs tests with dynamic dependencies",
@@ -29208,10 +29234,10 @@ Received string: \\"·
"totalTime": 847,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "overriding native promise does not freeze Jest",
@@ -29231,10 +29257,10 @@ Received string: \\"·
"totalTime": 2045.9999999999998,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Tests are executed only once even in an MPR",
@@ -29248,10 +29274,10 @@ Received string: \\"·
"totalTime": 976,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "module.parent should be null in test files",
@@ -29265,10 +29291,10 @@ Received string: \\"·
"totalTime": 886,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints useful error for requires after test is done",
@@ -29282,10 +29308,10 @@ Received string: \\"·
"totalTime": 921,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite with invalid assertions in afterAll",
@@ -29299,10 +29325,10 @@ Received string: \\"·
"totalTime": 861,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exits the process after test are done but before timers complete",
@@ -29316,10 +29342,10 @@ Received string: \\"·
"totalTime": 298,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "JSON is available in the global scope",
@@ -29339,10 +29365,10 @@ Received string: \\"·
"totalTime": 29,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testNamePattern",
@@ -29356,10 +29382,10 @@ Received string: \\"·
"totalTime": 910,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "jest --debug",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "outputs debugging info before running the test",
@@ -29373,10 +29399,10 @@ Received string: \\"·
"totalTime": 899,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "imported globals",
@@ -29390,10 +29416,10 @@ Received string: \\"·
"totalTime": 1043,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "use the custom resolver",
@@ -29407,10 +29433,10 @@ Received string: \\"·
"totalTime": 826,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "handles circular inequality properly",
@@ -29424,10 +29450,10 @@ Received string: \\"·
"totalTime": 1451,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "globals are undefined if passed \`false\` from CLI",
@@ -29447,10 +29473,10 @@ Received string: \\"·
"totalTime": 1860,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "correctly skip \`beforeAll\`s in skipped tests",
@@ -29464,10 +29490,10 @@ Received string: \\"·
"totalTime": 1061,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with custom inline snapshot matchers",
@@ -29481,10 +29507,10 @@ Received string: \\"·
"totalTime": 2206,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Regex Char In Path",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "parses paths containing regex chars correctly",
@@ -29498,10 +29524,10 @@ Received string: \\"·
"totalTime": 962,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Coverage Report",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "outputs coverage report",
@@ -29512,7 +29538,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "File path not found in mulit-project scenario",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "outputs coverage report",
@@ -29526,10 +29552,10 @@ Received string: \\"·
"totalTime": 1063,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "test config is without transform",
@@ -29540,7 +29566,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "on node ^12.16.0 || >=13.7.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs test with native ESM",
@@ -29551,7 +29577,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "on node >=14.3.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports top-level await",
@@ -29565,10 +29591,10 @@ Received string: \\"·
"totalTime": 905,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with passing tests",
@@ -29588,10 +29614,10 @@ Received string: \\"·
"totalTime": 968,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should transform linked modules",
@@ -29605,10 +29631,10 @@ Received string: \\"·
"totalTime": 783,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testNamePattern",
@@ -29622,10 +29648,10 @@ Received string: \\"·
"totalTime": 859,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mocks async functions",
@@ -29639,10 +29665,10 @@ Received string: \\"·
"totalTime": 55,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "reports the correct file size",
@@ -29662,10 +29688,10 @@ Received string: \\"·
"totalTime": 397,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defining tests and hooks asynchronously throws",
@@ -29679,10 +29705,10 @@ Received string: \\"·
"totalTime": 869,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls testEnvironment handleTestEvent",
@@ -29696,10 +29722,10 @@ Received string: \\"·
"totalTime": 1501,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "\`done()\` should not be called more than once",
@@ -29713,10 +29739,10 @@ Received string: \\"·
"totalTime": 882,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "hook in empty describe",
@@ -29748,10 +29774,10 @@ Received string: \\"·
"totalTime": 2886,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not work on jest-circus",
@@ -29909,10 +29935,10 @@ Received string: \\"·
"totalTime": 56,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Test Retries",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "retries failed tests",
@@ -29944,10 +29970,10 @@ Received string: \\"·
"totalTime": 3277,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with jest.config.ts",
@@ -29985,10 +30011,10 @@ Received string: \\"·
"totalTime": 14322,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "outputs coverage report",
@@ -30068,10 +30094,10 @@ Received string: \\"·
"totalTime": 22264,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "basic support",
@@ -30151,10 +30177,10 @@ Received string: \\"·
"totalTime": 23917,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders correctly",
@@ -30186,10 +30212,10 @@ Received string: \\"·
"totalTime": 8559,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with a single snapshot",
@@ -30215,10 +30241,10 @@ Received string: \\"·
"totalTime": 4670,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "exceeds the timeout",
@@ -30250,10 +30276,10 @@ Received string: \\"·
"totalTime": 4029,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "empty external",
@@ -30279,10 +30305,10 @@ Received string: \\"·
"totalTime": 3544,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "async jasmine",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with beforeAll",
@@ -30380,10 +30406,10 @@ Received string: \\"·
"totalTime": 28291,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "config as JSON",
@@ -30427,10 +30453,10 @@ Received string: \\"·
"totalTime": 3945,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "fails a test that terminates the worker with a fatal error",
@@ -30444,10 +30470,10 @@ Received string: \\"·
"totalTime": 3167,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with a single snapshot",
@@ -30467,19 +30493,19 @@ Received string: \\"·
"totalTime": 4435,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "can press \\"p\\" to filter by file name",
+ "name": "can press "p" to filter by file name",
"result": "success",
"time": 1471,
},
TestCaseResult {
"error": undefined,
- "name": "can press \\"t\\" to filter by test name",
+ "name": "can press "t" to filter by test name",
"result": "success",
"time": 1775,
},
@@ -30490,10 +30516,10 @@ Received string: \\"·
"totalTime": 3503,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "triggers unexpected token error message for non-JS assets",
@@ -30519,10 +30545,10 @@ Received string: \\"·
"totalTime": 3411,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "understands dependencies using jest.requireMock",
@@ -30536,10 +30562,10 @@ Received string: \\"·
"totalTime": 2119,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "instruments and collects coverage for typescript files",
@@ -30553,10 +30579,10 @@ Received string: \\"·
"totalTime": 2893,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with jest.config.js",
@@ -30582,10 +30608,10 @@ Received string: \\"·
"totalTime": 2134,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "\`require.main\` on using \`--resetModules='true'\` should not be undefined",
@@ -30605,10 +30631,10 @@ Received string: \\"·
"totalTime": 1961,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "modern implementation of fake timers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be possible to use modern implementation from config",
@@ -30628,10 +30654,10 @@ Received string: \\"·
"totalTime": 1680,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "runs tests by exact path",
@@ -30645,10 +30671,10 @@ Received string: \\"·
"totalTime": 1999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "respects testEnvironment docblock",
@@ -30662,10 +30688,10 @@ Received string: \\"·
"totalTime": 1628,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with concurrent.each",
@@ -30685,10 +30711,10 @@ Received string: \\"·
"totalTime": 1591,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite with auto-restore",
@@ -30708,10 +30734,10 @@ Received string: \\"·
"totalTime": 1797,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "JSON Reporter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "writes test result to sum.result.json",
@@ -30731,10 +30757,10 @@ Received string: \\"·
"totalTime": 1514,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "suite with auto-reset",
@@ -30754,13 +30780,13 @@ Received string: \\"·
"totalTime": 1666,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "runs only \\"it.only\\" tests",
+ "name": "runs only "it.only" tests",
"result": "success",
"time": 834,
},
@@ -30771,13 +30797,13 @@ Received string: \\"·
"totalTime": 888,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "can press \\"f\\" to run only failed tests",
+ "name": "can press "f" to run only failed tests",
"result": "success",
"time": 1341,
},
@@ -30788,10 +30814,10 @@ Received string: \\"·
"totalTime": 1394,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not work on jest-circus",
@@ -30802,7 +30828,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "Correct beforeEach order",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "ensures the correct order for beforeEach",
@@ -30816,10 +30842,10 @@ Received string: \\"·
"totalTime": 55,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with injected globals",
@@ -30833,10 +30859,10 @@ Received string: \\"·
"totalTime": 1010.9999999999999,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Snapshot serializers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders snapshot",
@@ -30856,10 +30882,10 @@ Received string: \\"·
"totalTime": 2065,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "defaults to null for location",
@@ -30879,10 +30905,10 @@ Received string: \\"·
"totalTime": 1764,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints console.logs when run with forceExit",
@@ -30896,10 +30922,10 @@ Received string: \\"·
"totalTime": 793,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not require project modules from inside node_modules",
@@ -30913,10 +30939,10 @@ Received string: \\"·
"totalTime": 833,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "produces code coverage for uncovered files without transformer",
@@ -30930,10 +30956,10 @@ Received string: \\"·
"totalTime": 1075,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "CLI accepts exact file names if matchers matched",
@@ -30953,10 +30979,10 @@ Received string: \\"·
"totalTime": 1230,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "on node >=12.2.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "\`require.main\` not undefined after createRequire",
@@ -30970,10 +30996,10 @@ Received string: \\"·
"totalTime": 966,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "jest.resetModules should not error when _isMockFunction is defined but not boolean",
@@ -30987,13 +31013,13 @@ Received string: \\"·
"totalTime": 926,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "can press \\"u\\" to update snapshots",
+ "name": "can press "u" to update snapshots",
"result": "success",
"time": 993,
},
@@ -31004,10 +31030,10 @@ Received string: \\"·
"totalTime": 1075,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "mock works with generator",
@@ -31021,10 +31047,10 @@ Received string: \\"·
"totalTime": 1027,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "require.resolve.paths",
@@ -31038,10 +31064,10 @@ Received string: \\"·
"totalTime": 1155,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints coverage with missing sourcemaps",
@@ -31061,10 +31087,10 @@ Received string: \\"·
"totalTime": 2412,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "require.resolve with paths",
@@ -31078,10 +31104,10 @@ Received string: \\"·
"totalTime": 1170,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "logs memory usage",
@@ -31095,10 +31121,10 @@ Received string: \\"·
"totalTime": 884,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "provides \`require.main\` set to test suite module",
@@ -31112,10 +31138,10 @@ Received string: \\"·
"totalTime": 1137,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "store snapshot even if fs is mocked",
@@ -31129,10 +31155,10 @@ Received string: \\"·
"totalTime": 883,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Snapshot serializers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders snapshot",
@@ -31146,10 +31172,10 @@ Received string: \\"·
"totalTime": 838,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "AppComponent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create the app",
@@ -31175,10 +31201,10 @@ Received string: \\"·
"totalTime": 654,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints a usable stack trace even if no Error.captureStackTrace",
@@ -31192,10 +31218,10 @@ Received string: \\"·
"totalTime": 899,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "testNamePattern skipped",
@@ -31209,10 +31235,10 @@ Received string: \\"·
"totalTime": 991,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "that the failureDetails property is set",
@@ -31226,10 +31252,10 @@ Received string: \\"·
"totalTime": 907,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Custom snapshot resolver",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Resolves snapshot files using custom resolver",
@@ -31243,10 +31269,10 @@ Received string: \\"·
"totalTime": 823,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints stack trace pointing to process.exit call",
@@ -31260,10 +31286,10 @@ Received string: \\"·
"totalTime": 1070,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "supports NODE_PATH",
@@ -31277,10 +31303,10 @@ Received string: \\"·
"totalTime": 866,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Verbose Reporter",
@@ -31294,10 +31320,10 @@ Received string: \\"·
"totalTime": 683,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "--listTests flag",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "causes tests to be printed in different lines",
@@ -31317,10 +31343,10 @@ Received string: \\"·
"totalTime": 945,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "allows retrieving the current domain",
@@ -31334,10 +31360,10 @@ Received string: \\"·
"totalTime": 908,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "CheckboxWithLabel changes the text after click",
@@ -31351,10 +31377,10 @@ Received string: \\"·
"totalTime": 469,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Service: DataService",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create service",
@@ -31374,10 +31400,10 @@ Received string: \\"·
"totalTime": 431,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "prints useful error for environment methods after test is done",
@@ -31391,10 +31417,10 @@ Received string: \\"·
"totalTime": 892,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "reads config from cjs file",
@@ -31405,7 +31431,7 @@ Received string: \\"·
},
TestGroupResult {
"name": "on node ^12.17.0 || >=13.2.0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "reads config from mjs file",
@@ -31425,10 +31451,10 @@ Received string: \\"·
"totalTime": 526,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "CheckboxWithLabel changes the text after click",
@@ -31442,10 +31468,10 @@ Received string: \\"·
"totalTime": 434,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "define mock per test",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "uses mocked module",
@@ -31465,10 +31491,10 @@ Received string: \\"·
"totalTime": 116,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should aggregate docs from collection",
@@ -31482,10 +31508,10 @@ Received string: \\"·
"totalTime": 236,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders correctly",
@@ -31517,10 +31543,10 @@ Received string: \\"·
"totalTime": 181,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "CheckboxWithLabel changes the text after click",
@@ -31534,10 +31560,10 @@ Received string: \\"·
"totalTime": 227,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "CheckboxWithLabel changes the text after click",
@@ -31551,10 +31577,10 @@ Received string: \\"·
"totalTime": 256,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "--showConfig outputs config info and exits",
@@ -31568,10 +31594,10 @@ Received string: \\"·
"totalTime": 195,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "renders correctly",
@@ -31585,10 +31611,10 @@ Received string: \\"·
"totalTime": 62,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "schedules a 10-second timer after 1 second",
@@ -31602,10 +31628,10 @@ Received string: \\"·
"totalTime": 94,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "calls into $.ajax with the correct params",
@@ -31625,10 +31651,10 @@ Received string: \\"·
"totalTime": 196,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "displays a user after a click",
@@ -31642,10 +31668,10 @@ Received string: \\"·
"totalTime": 196,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "timerGame",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "waits 1 second before ending the game",
@@ -31671,10 +31697,10 @@ Received string: \\"·
"totalTime": 74,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "works with resolves",
@@ -31730,10 +31756,10 @@ Received string: \\"·
"totalTime": 96,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "implementation created by automock",
@@ -31753,10 +31779,10 @@ Received string: \\"·
"totalTime": 115,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "listFilesInDirectorySync",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "includes all files in the directory in the summary",
@@ -31770,10 +31796,10 @@ Received string: \\"·
"totalTime": 87,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "calc - mocks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns result from subtract",
@@ -31817,10 +31843,10 @@ Received string: \\"·
"totalTime": 276,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds 1 + 2 to equal 3",
@@ -31834,10 +31860,10 @@ Received string: \\"·
"totalTime": 78,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Service: SubService",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create service",
@@ -31851,10 +31877,10 @@ Received string: \\"·
"totalTime": 109,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "if orginal user model",
@@ -31868,10 +31894,10 @@ Received string: \\"·
"totalTime": 41,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds 1 + 2 to equal 3 in TScript",
@@ -31891,10 +31917,10 @@ Received string: \\"·
"totalTime": 69,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "if utils are mocked",
@@ -31914,10 +31940,10 @@ Received string: \\"·
"totalTime": 74,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "if lodash head is mocked",
@@ -31931,10 +31957,10 @@ Received string: \\"·
"totalTime": 109,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "if user model is mocked",
@@ -31948,10 +31974,10 @@ Received string: \\"·
"totalTime": 105,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds 1 + 2 to equal 3 in Typescript",
@@ -31971,10 +31997,10 @@ Received string: \\"·
"totalTime": 100,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "b",
@@ -31988,10 +32014,10 @@ Received string: \\"·
"totalTime": 21,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "subtracts 5 - 1 to equal 4 in TypeScript",
@@ -32005,10 +32031,10 @@ Received string: \\"·
"totalTime": 43,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does a full mock",
@@ -32022,10 +32048,10 @@ Received string: \\"·
"totalTime": 60,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "globals are properly defined",
@@ -32039,10 +32065,10 @@ Received string: \\"·
"totalTime": 31,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "original implementation",
@@ -32056,10 +32082,10 @@ Received string: \\"·
"totalTime": 24,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "timers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work before calling resetAllMocks",
@@ -32079,10 +32105,10 @@ Received string: \\"·
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "v8 module",
@@ -32096,10 +32122,10 @@ Received string: \\"·
"totalTime": 30,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "timers",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work before calling resetAllMocks",
@@ -32113,10 +32139,10 @@ Received string: \\"·
"totalTime": 34,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does a partial mock",
@@ -32130,10 +32156,10 @@ Received string: \\"·
"totalTime": 215,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "d",
@@ -32147,10 +32173,10 @@ Received string: \\"·
"totalTime": 21,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "a",
@@ -32164,10 +32190,10 @@ Received string: \\"·
"totalTime": 29,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "e",
@@ -32181,10 +32207,10 @@ Received string: \\"·
"totalTime": 27,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "c",
@@ -32198,10 +32224,10 @@ Received string: \\"·
"totalTime": 42,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "errors when a test both returns a promise and takes a callback",
diff --git a/__tests__/__snapshots__/mocha-json.test.ts.snap b/__tests__/__snapshots__/mocha-json.test.ts.snap
index 70b4b96..7038239 100644
--- a/__tests__/__snapshots__/mocha-json.test.ts.snap
+++ b/__tests__/__snapshots__/mocha-json.test.ts.snap
@@ -3,12 +3,12 @@
exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/mocha-json.json",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Test 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Passing test",
@@ -19,12 +19,12 @@ TestRunResult {
},
TestGroupResult {
"name": "Test 1 Test 1.1",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: Some error
- at Object.throwError (lib\\\\main.js:2:9)
- at Context. (test\\\\main.test.js:15:11)
+ at Object.throwError (lib\\main.js:2:9)
+ at Context. (test\\main.test.js:15:11)
at processImmediate (internal/timers.js:461:21)",
"line": 2,
"message": "Some error",
@@ -35,12 +35,12 @@ TestRunResult {
"time": 0,
},
TestCaseResult {
- "error": Object {
+ "error": {
"details": "AssertionError [ERR_ASSERTION]: Expected values to be strictly equal:
false !== true
- at Context. (test\\\\main.test.js:11:14)
+ at Context. (test\\main.test.js:11:14)
at processImmediate (internal/timers.js:461:21)",
"line": 11,
"message": "Expected values to be strictly equal:
@@ -57,11 +57,11 @@ false !== true
},
TestGroupResult {
"name": "Test 2",
- "tests": Array [
+ "tests": [
TestCaseResult {
- "error": Object {
+ "error": {
"details": "Error: Some error
- at Context. (test\\\\main.test.js:22:11)
+ at Context. (test\\main.test.js:22:11)
at processImmediate (internal/timers.js:461:21)",
"line": 22,
"message": "Some error",
@@ -78,10 +78,10 @@ false !== true
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": null,
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "Skipped test",
@@ -89,12 +89,12 @@ false !== true
"time": 0,
},
TestCaseResult {
- "error": Object {
- "details": "Error: Timeout of 1ms exceeded. For async tests and hooks, ensure \\"done()\\" is called; if returning a Promise, ensure it resolves. (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\mocha\\\\test\\\\second.test.js)
+ "error": {
+ "details": "Error: Timeout of 1ms exceeded. For async tests and hooks, ensure "done()" is called; if returning a Promise, ensure it resolves. (C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\mocha\\test\\second.test.js)
at listOnTimeout (internal/timers.js:554:17)
at processTimers (internal/timers.js:497:7)",
"line": undefined,
- "message": "Timeout of 1ms exceeded. For async tests and hooks, ensure \\"done()\\" is called; if returning a Promise, ensure it resolves. (C:\\\\Users\\\\Michal\\\\Workspace\\\\dorny\\\\test-reporter\\\\reports\\\\mocha\\\\test\\\\second.test.js)",
+ "message": "Timeout of 1ms exceeded. For async tests and hooks, ensure "done()" is called; if returning a Promise, ensure it resolves. (C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\mocha\\test\\second.test.js)",
"path": undefined,
},
"name": "Timeout test",
@@ -115,12 +115,12 @@ false !== true
exports[`mocha-json tests report from mochajs/mocha test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/mocha/mocha-test-results.json",
- "suites": Array [
+ "suites": [
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "class BufferedWorkerPool constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should apply defaults",
@@ -131,7 +131,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method run()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should deserialize the result",
@@ -148,7 +148,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method run() when passed a non-string filepath",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -159,7 +159,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method run() when passed no arguments",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -170,7 +170,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method stats()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the object returned by \`workerpool.Pool#stats\`",
@@ -181,10 +181,10 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method terminate() when called with \`force\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should delegate to the underlying pool w/ \\"force\\" behavior",
+ "name": "should delegate to the underlying pool w/ "force" behavior",
"result": "success",
"time": 1,
},
@@ -192,10 +192,10 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool instance method terminate() when called without \`force\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should delegate to the underlying pool w/o \\"force\\" behavior",
+ "name": "should delegate to the underlying pool w/o "force" behavior",
"result": "success",
"time": 1,
},
@@ -203,7 +203,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool static method create()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a BufferedWorkerPool instance",
@@ -214,7 +214,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool static method create() when passed no arguments",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not throw",
@@ -225,7 +225,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool static method serializeOptions()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a serialized string",
@@ -236,7 +236,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool static method serializeOptions() when called multiple times with the same object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not perform serialization twice",
@@ -253,7 +253,7 @@ TestRunResult {
},
TestGroupResult {
"name": "class BufferedWorkerPool static method serializeOptions() when passed no arguments",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not throw",
@@ -267,10 +267,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "cli/config findConfig()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should look for one of the config files using findup-sync",
@@ -287,7 +287,7 @@ TestRunResult {
},
TestGroupResult {
"name": "cli/config loadConfig() when config file parsing fails",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -297,8 +297,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".cjs\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".cjs" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the JS parser",
@@ -308,8 +308,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".js\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".js" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the JS parser",
@@ -319,8 +319,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".json\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".json" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the JSON parser",
@@ -330,8 +330,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".jsonc\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".jsonc" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the JSON parser",
@@ -341,8 +341,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".yaml\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".yaml" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the YAML parser",
@@ -352,8 +352,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with \\".yml\\" extension",
- "tests": Array [
+ "name": "cli/config loadConfig() when parsing succeeds when supplied a filepath with ".yml" extension",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the YAML parser",
@@ -364,7 +364,7 @@ TestRunResult {
},
TestGroupResult {
"name": "cli/config loadConfig() when supplied a filepath with unsupported extension",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the JSON parser",
@@ -378,10 +378,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "node-flags impliesNoTimeouts()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true for inspect flags",
@@ -392,7 +392,7 @@ TestRunResult {
},
TestGroupResult {
"name": "node-flags isNodeFlag() for all allowed node env flags which conflict with mocha flags",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "--require should return false",
@@ -409,7 +409,7 @@ TestRunResult {
},
TestGroupResult {
"name": "node-flags isNodeFlag() for all allowed node environment flags",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "--abort-on-uncaught-exception should return true",
@@ -960,52 +960,52 @@ TestRunResult {
},
TestGroupResult {
"name": "node-flags isNodeFlag() special cases",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should return true for \\"es-staging\\"",
+ "name": "should return true for "es-staging"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for \\"gc-global\\"",
+ "name": "should return true for "gc-global"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for \\"harmony\\" itself",
+ "name": "should return true for "harmony" itself",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for \\"use-strict\\"",
+ "name": "should return true for "use-strict"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for flags starting with \\"--v8-\\"",
+ "name": "should return true for flags starting with "--v8-"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for flags starting with \\"harmony-\\" or \\"harmony_\\"",
+ "name": "should return true for flags starting with "harmony-" or "harmony_"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for flags starting with \\"preserve-symlinks\\"",
+ "name": "should return true for flags starting with "preserve-symlinks"",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return true for flags starting with \\"trace-\\" or \\"trace_\\"",
+ "name": "should return true for flags starting with "trace-" or "trace_"",
"result": "success",
"time": 0,
},
@@ -1013,7 +1013,7 @@ TestRunResult {
},
TestGroupResult {
"name": "node-flags isNodeFlag() when expecting leading dashes",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should require leading dashes",
@@ -1024,7 +1024,7 @@ TestRunResult {
},
TestGroupResult {
"name": "node-flags unparseNodeFlags()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should handle multiple v8 flags",
@@ -1044,10 +1044,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
- "name": "options loadOptions() \\"extension\\" handling when user does not supply \\"extension\\" option",
- "tests": Array [
+ "name": "options loadOptions() "extension" handling when user does not supply "extension" option",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should retain the default",
@@ -1057,8 +1057,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() \\"extension\\" handling when user supplies \\"extension\\" option",
- "tests": Array [
+ "name": "options loadOptions() "extension" handling when user supplies "extension" option",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not concatenate the default value",
@@ -1068,8 +1068,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() \\"ignore\\" handling",
- "tests": Array [
+ "name": "options loadOptions() "ignore" handling",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not split option values by comma",
@@ -1079,8 +1079,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() \\"spec\\" handling when user supplies \\"spec\\" in config and positional arguments",
- "tests": Array [
+ "name": "options loadOptions() "spec" handling when user supplies "spec" in config and positional arguments",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should place both - unsplitted - into the positional arguments array",
@@ -1091,7 +1091,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() config priority",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should prioritize args over rc file",
@@ -1113,8 +1113,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"h\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "h"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1124,8 +1124,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"help\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "help"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1135,8 +1135,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"list-interfaces\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "list-interfaces"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1146,8 +1146,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"list-reporters\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "list-reporters"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1157,8 +1157,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"V\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "V"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1168,8 +1168,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "options loadOptions() when called with a one-and-done arg \\"version\\"",
- "tests": Array [
+ "name": "options loadOptions() when called with a one-and-done arg "version"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return basic parsed arguments and flag",
@@ -1180,7 +1180,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when no parameter provided",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an object containing positional args, defaults, and anti-reloading flags",
@@ -1191,7 +1191,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided package.json when called with package = false (\`--no-package\`)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not look for package.json",
@@ -1214,7 +1214,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided package.json when path to package.json (\`--package \`) is invalid",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -1225,7 +1225,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided package.json when path to package.json (\`--package \`) is valid",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not try to find a package.json",
@@ -1248,7 +1248,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided package.json when path to package.json unspecified",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return merged options incl. found package.json",
@@ -1265,7 +1265,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided rc file when called with config = false (\`--no-config\`)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not attempt to find a config file",
@@ -1294,7 +1294,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided rc file when called with unspecified config when an rc file would be found",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to load file at found path",
@@ -1317,7 +1317,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided rc file when called with unspecified config when an rc file would not be found",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should look for a config",
@@ -1340,7 +1340,7 @@ TestRunResult {
},
TestGroupResult {
"name": "options loadOptions() when parameter provided rc file when path to config (\`--config \`) is invalid",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to load file at path",
@@ -1366,10 +1366,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "helpers list() when given a comma-delimited string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a flat array",
@@ -1380,7 +1380,7 @@ TestRunResult {
},
TestGroupResult {
"name": "helpers list() when provided a flat array",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a flat array",
@@ -1391,7 +1391,7 @@ TestRunResult {
},
TestGroupResult {
"name": "helpers list() when provided a nested array",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a flat array",
@@ -1402,7 +1402,7 @@ TestRunResult {
},
TestGroupResult {
"name": "helpers validateLegacyPlugin() when a plugin throws an exception upon load",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail and report the original error",
@@ -1412,8 +1412,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "helpers validateLegacyPlugin() when used with \\"reporter\\" key",
- "tests": Array [
+ "name": "helpers validateLegacyPlugin() when used with "reporter" key",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should disallow an array of names",
@@ -1429,8 +1429,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "helpers validateLegacyPlugin() when used with an \\"interfaces\\" key",
- "tests": Array [
+ "name": "helpers validateLegacyPlugin() when used with an "interfaces" key",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should disallow an array of names",
@@ -1447,7 +1447,7 @@ TestRunResult {
},
TestGroupResult {
"name": "helpers validateLegacyPlugin() when used with an unknown plugin type",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail",
@@ -1461,10 +1461,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "command run builder array type",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include option extension",
@@ -1523,7 +1523,7 @@ TestRunResult {
},
TestGroupResult {
"name": "command run builder boolean type",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include option allow-uncaught",
@@ -1654,7 +1654,7 @@ TestRunResult {
},
TestGroupResult {
"name": "command run builder number type",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include option jobs",
@@ -1671,7 +1671,7 @@ TestRunResult {
},
TestGroupResult {
"name": "command run builder string type",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include option config",
@@ -1727,10 +1727,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Mocha instance method addFile()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add the given file to the files array",
@@ -1747,7 +1747,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method lazyLoadFiles()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the \`Mocha\` instance",
@@ -1758,7 +1758,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method lazyLoadFiles() when passed \`true\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should enable lazy loading",
@@ -1769,7 +1769,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method lazyLoadFiles() when passed a non-\`true\` value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should enable eager loading",
@@ -1780,7 +1780,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method loadFiles()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should execute the optional callback if given",
@@ -1797,7 +1797,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Mocha instance",
@@ -1808,7 +1808,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js when \`Mocha\` instance in serial mode when passed \`true\` value when \`Mocha\` instance is in \`INIT\` state",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should enable parallel mode",
@@ -1819,7 +1819,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js when \`Mocha\` instance in serial mode when passed \`true\` value when \`Mocha\` instance is not in \`INIT\` state",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -1830,7 +1830,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js when \`Mocha\` instance in serial mode when passed non-\`true\` value when \`Mocha\` instance is in \`INIT\` state",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should enable serial mode",
@@ -1841,7 +1841,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js when parallel mode is already disabled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not swap the Runner, nor change lazy loading setting",
@@ -1852,7 +1852,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in Node.js when parallel mode is already enabled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not swap the Runner, nor change lazy loading setting",
@@ -1862,8 +1862,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method reporter() when a reporter exists relative to the \\"mocha\\" module path",
- "tests": Array [
+ "name": "Mocha instance method reporter() when a reporter exists relative to the "mocha" module path",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should load from module path",
@@ -1873,11 +1873,11 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method reporter() when a reporter exists relative to the \\"mocha\\" module path when the reporter throws upon load",
- "tests": Array [
+ "name": "Mocha instance method reporter() when a reporter exists relative to the "mocha" module path when the reporter throws upon load",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw \\"invalid reporter\\" exception",
+ "name": "should throw "invalid reporter" exception",
"result": "success",
"time": 2,
},
@@ -1891,7 +1891,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method reporter() when a reporter exists relative to the cwd",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should load from current working directory",
@@ -1902,10 +1902,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method reporter() when a reporter exists relative to the cwd when the reporter throws upon load",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw \\"invalid reporter\\" exception",
+ "name": "should throw "invalid reporter" exception",
"result": "success",
"time": 2,
},
@@ -1919,7 +1919,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method unloadFiles()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -1948,7 +1948,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha static method unloadFile()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should unload a specific file from cache",
@@ -1962,13 +1962,13 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should start in \\"IDLE\\" state",
+ "name": "should start in "IDLE" state",
"result": "success",
"time": 1,
},
@@ -1976,7 +1976,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner event EVENT_RUN_END",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should change the state to COMPLETE",
@@ -1987,7 +1987,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method isParallelMode()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true",
@@ -1998,7 +1998,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method linkPartialObjects()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the runner",
@@ -2009,7 +2009,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -2026,7 +2026,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when a suite has a bail flag when an event contains an error and has positive failures when subsequent files already started running",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cleanly terminate the thread pool",
@@ -2037,7 +2037,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when a suite has a bail flag when an event contains an error and has positive failures when subsequent files have not yet been run",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cleanly terminate the thread pool",
@@ -2054,7 +2054,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when a suite has a bail flag when no event contains an error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not force-terminate",
@@ -2065,7 +2065,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when a worker fails",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should delegate to Runner#uncaught",
@@ -2082,7 +2082,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when instructed to link objects",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create object references",
@@ -2093,7 +2093,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when instructed to link objects when event data object is missing an ID",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should result in an uncaught exception",
@@ -2104,7 +2104,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when suite should bail when an event contains an error and has positive failures when subsequent files already started running",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cleanly terminate the thread pool",
@@ -2115,7 +2115,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when suite should bail when an event contains an error and has positive failures when subsequent files have not yet been run",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cleanly terminate the thread pool",
@@ -2126,7 +2126,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method run() when suite should bail when no event contains an error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not force-terminate",
@@ -2137,7 +2137,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance method workerReporter()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return its context",
@@ -2148,7 +2148,7 @@ TestRunResult {
},
TestGroupResult {
"name": "parallel-buffered-runner ParallelBufferedRunner instance property _state",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should disallow an invalid state transition",
@@ -2162,10 +2162,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "ParallelBuffered constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should listen for Runner events",
@@ -2182,7 +2182,7 @@ TestRunResult {
},
TestGroupResult {
"name": "ParallelBuffered event on any other event listened for",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should populate its \`events\` array with SerializableEvents",
@@ -2193,7 +2193,7 @@ TestRunResult {
},
TestGroupResult {
"name": "ParallelBuffered event on EVENT_RUN_END",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should remove all listeners",
@@ -2204,7 +2204,7 @@ TestRunResult {
},
TestGroupResult {
"name": "ParallelBuffered instance method done",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should execute its callback with a SerializableWorkerResult",
@@ -2224,10 +2224,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "serializer function deserialize when passed a non-object value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the value",
@@ -2238,7 +2238,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function deserialize when passed a SerializedWorkerResult object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the result of \`SerializableWorkerResult.deserialize\` called on the value",
@@ -2249,7 +2249,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function deserialize when passed an object value which is not a SerializedWorkerResult",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the value",
@@ -2260,7 +2260,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function deserialize when passed nothing",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`undefined\`",
@@ -2271,7 +2271,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function serialize when not passed anything",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`undefined\`",
@@ -2282,7 +2282,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function serialize when passed a non-object value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the value",
@@ -2293,7 +2293,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function serialize when passed an object value having a \`serialize\` method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the result of the \`serialize\` method",
@@ -2304,7 +2304,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer function serialize when passed an object value w/o a \`serialize\` method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the value",
@@ -2315,10 +2315,10 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent constructor when called with a non-object \`rawObject\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw \\"invalid arg type\\" error",
+ "name": "should throw "invalid arg type" error",
"result": "success",
"time": 1,
},
@@ -2326,10 +2326,10 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent constructor when called without \`eventName\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw \\"invalid arg value\\" error",
+ "name": "should throw "invalid arg value" error",
"result": "success",
"time": 0,
},
@@ -2337,7 +2337,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should freeze the instance",
@@ -2354,7 +2354,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not retain not-own props",
@@ -2377,7 +2377,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object containing a nested prop with an Error value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should serialize the Error",
@@ -2388,7 +2388,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object containing a non-\`serialize\` method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should remove the method",
@@ -2399,7 +2399,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object containing a top-level prop with an Error value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should serialize the Error",
@@ -2410,7 +2410,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object containing an array",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should serialize the array",
@@ -2421,7 +2421,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object containing an object with a \`serialize\` method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call the \`serialize\` method",
@@ -2432,7 +2432,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent instance method serialize when passed an object with a \`serialize\` method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call the \`serialize\` method",
@@ -2443,7 +2443,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method create",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should instantiate a SerializableEvent",
@@ -2454,7 +2454,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a new object w/ null prototype",
@@ -2465,10 +2465,10 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize when passed a falsy parameter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw \\"invalid arg type\\" error",
+ "name": "should throw "invalid arg type" error",
"result": "success",
"time": 0,
},
@@ -2476,7 +2476,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize when passed value contains \`data\` prop",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should ignore __proto__",
@@ -2487,7 +2487,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize when passed value contains \`data\` prop when \`data\` prop contains a nested serialized Error prop",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create an Error instance from the nested serialized Error prop",
@@ -2498,7 +2498,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize when passed value contains an \`error\` prop",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create an Error instance from the prop",
@@ -2508,8 +2508,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "serializer SerializableEvent static method deserialize when passed value data contains a prop beginning with \\"$$\\"",
- "tests": Array [
+ "name": "serializer SerializableEvent static method deserialize when passed value data contains a prop beginning with "$$"",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should create a new prop having a function value",
@@ -2524,7 +2524,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should remove the prop with the \\"$$\\" prefix",
+ "name": "should remove the prop with the "$$" prefix",
"result": "success",
"time": 0,
},
@@ -2532,7 +2532,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableEvent static method deserialize when the value data contains a prop with an array value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should deserialize each prop",
@@ -2543,7 +2543,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add a readonly \`__type\` prop",
@@ -2554,7 +2554,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult instance method serialize",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call \`SerializableEvent#serialize\` of each of its events",
@@ -2571,7 +2571,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult static method create",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a new SerializableWorkerResult instance",
@@ -2582,7 +2582,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult static method deserialize",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call SerializableEvent#deserialize on each item in its \`events\` prop",
@@ -2599,7 +2599,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult static method isSerializedWorkerResult when passed an instance",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`true\`",
@@ -2610,7 +2610,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult static method isSerializedWorkerResult when passed an object with an appropriate \`__type\` prop",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`true\`",
@@ -2621,7 +2621,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serializer SerializableWorkerResult static method isSerializedWorkerResult when passed an object without an appropriate \`__type\` prop",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`false\`",
@@ -2635,10 +2635,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "stackTraceFilter() on browser",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not strip out other bower_components",
@@ -2649,7 +2649,7 @@ TestRunResult {
},
TestGroupResult {
"name": "stackTraceFilter() on node on POSIX OS",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not ignore other bower_components and components",
@@ -2678,7 +2678,7 @@ TestRunResult {
},
TestGroupResult {
"name": "stackTraceFilter() on node on Windows",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work on Windows",
@@ -2692,19 +2692,19 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "utils function canonicalType()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should return \\"asyncfunction\\" if the parameter is an async function",
+ "name": "should return "asyncfunction" if the parameter is an async function",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should return \\"buffer\\" if the parameter is a Buffer",
+ "name": "should return "buffer" if the parameter is a Buffer",
"result": "success",
"time": 0,
},
@@ -2712,7 +2712,7 @@ TestRunResult {
},
TestGroupResult {
"name": "utils function cwd()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the current working directory",
@@ -2723,16 +2723,16 @@ TestRunResult {
},
TestGroupResult {
"name": "utils function type()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should return \\"error\\" if the parameter is an Error",
+ "name": "should return "error" if the parameter is an Error",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "should return \\"function\\" if the parameter is an async function",
+ "name": "should return "function" if the parameter is an async function",
"result": "success",
"time": 0,
},
@@ -2743,10 +2743,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "worker when run as main process",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -2757,7 +2757,7 @@ TestRunResult {
},
TestGroupResult {
"name": "worker when run as worker process",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should register itself with workerpool",
@@ -2767,8 +2767,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when called with empty \\"filepath\\" argument",
- "tests": Array [
+ "name": "worker when run as worker process function run() when called with empty "filepath" argument",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2779,7 +2779,7 @@ TestRunResult {
},
TestGroupResult {
"name": "worker when run as worker process function run() when called without arguments",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2790,7 +2790,7 @@ TestRunResult {
},
TestGroupResult {
"name": "worker when run as worker process function run() when passed a non-string \`options\` value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2801,7 +2801,7 @@ TestRunResult {
},
TestGroupResult {
"name": "worker when run as worker process function run() when passed an invalid string \`options\` value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2811,8 +2811,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when the file at \\"filepath\\" argument is unloadable",
- "tests": Array [
+ "name": "worker when run as worker process function run() when the file at "filepath" argument is unloadable",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2822,8 +2822,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when the file at \\"filepath\\" is loadable",
- "tests": Array [
+ "name": "worker when run as worker process function run() when the file at "filepath" is loadable",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call Mocha#run",
@@ -2832,13 +2832,13 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should handle \\"--require\\"",
+ "name": "should handle "--require"",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
- "name": "should handle \\"--ui\\"",
+ "name": "should handle "--ui"",
"result": "success",
"time": 3,
},
@@ -2857,8 +2857,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when the file at \\"filepath\\" is loadable when run twice",
- "tests": Array [
+ "name": "worker when run as worker process function run() when the file at "filepath" is loadable when run twice",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should initialize only once",
@@ -2868,8 +2868,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when the file at \\"filepath\\" is loadable when serialization fails",
- "tests": Array [
+ "name": "worker when run as worker process function run() when the file at "filepath" is loadable when serialization fails",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reject",
@@ -2879,8 +2879,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "worker when run as worker process function run() when the file at \\"filepath\\" is loadable when serialization succeeds",
- "tests": Array [
+ "name": "worker when run as worker process function run() when the file at "filepath" is loadable when serialization succeeds",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should resolve with a SerializedWorkerResult",
@@ -2894,10 +2894,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Context nested",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work",
@@ -2908,7 +2908,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Context Siblings sequestered sibling",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work",
@@ -2919,7 +2919,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Context Siblings sibling verifiction",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should allow test siblings to modify shared context",
@@ -2942,7 +2942,7 @@ TestRunResult {
},
TestGroupResult {
"name": "methods retries",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the number of retries",
@@ -2953,7 +2953,7 @@ TestRunResult {
},
TestGroupResult {
"name": "methods slow()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the slow",
@@ -2964,7 +2964,7 @@ TestRunResult {
},
TestGroupResult {
"name": "methods timeout()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the timeout",
@@ -2978,10 +2978,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "durations when fast",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not highlight",
@@ -2992,7 +2992,7 @@ TestRunResult {
},
TestGroupResult {
"name": "durations when reasonable",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should highlight in yellow",
@@ -3003,7 +3003,7 @@ TestRunResult {
},
TestGroupResult {
"name": "durations when slow",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should highlight in red",
@@ -3017,10 +3017,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Errors createForbiddenExclusivityError() when Mocha instance is not running in a worker process",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should output a message regarding --forbid-only",
@@ -3031,7 +3031,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors createForbiddenExclusivityError() when Mocha instance is running in a worker process",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should output a message regarding incompatibility",
@@ -3042,7 +3042,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors createInvalidInterfaceError()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include expected code in thrown interface errors",
@@ -3053,7 +3053,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors createInvalidReporterError()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include expected code in thrown reporter errors",
@@ -3064,7 +3064,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors deprecate()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cache the message",
@@ -3087,7 +3087,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors isMochaError() when provided a non-error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -3098,7 +3098,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors isMochaError() when provided an Error object having a known Mocha error code",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return true",
@@ -3109,7 +3109,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors isMochaError() when provided an Error object with a non-Mocha error code",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -3120,7 +3120,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Errors warn()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call process.emitWarning",
@@ -3146,10 +3146,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "global leaks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should cause tests to fail",
@@ -3164,7 +3164,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should pass when prefixed \\"mocha-\\"",
+ "name": "should pass when prefixed "mocha-"",
"result": "success",
"time": 0,
},
@@ -3181,10 +3181,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Mocha .grep()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add a RegExp to the mocha.options object",
@@ -3212,8 +3212,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha \\"fgrep\\" option",
- "tests": Array [
+ "name": "Mocha "fgrep" option",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should escape and convert string to a RegExp",
@@ -3223,8 +3223,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha \\"grep\\" option",
- "tests": Array [
+ "name": "Mocha "grep" option",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add a RegExp to the mocha.options object",
@@ -3240,8 +3240,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha \\"invert\\" option",
- "tests": Array [
+ "name": "Mocha "invert" option",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add a Boolean to the mocha.options object",
@@ -3255,10 +3255,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "async hooks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "one",
@@ -3284,10 +3284,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "serial nested",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "bar",
@@ -3304,7 +3304,7 @@ TestRunResult {
},
TestGroupResult {
"name": "serial nested hooks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "one",
@@ -3324,10 +3324,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "serial hooks",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "one",
@@ -3353,10 +3353,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "hook timeout",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work",
@@ -3370,10 +3370,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Hook error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should get the hook._error when called without arguments",
@@ -3390,7 +3390,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Hook reset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call Runnable.reset",
@@ -3410,10 +3410,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Mocha constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set _cleanReferencesAfterRun to true",
@@ -3424,7 +3424,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`global\` option is an \`Array\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to set globals",
@@ -3435,7 +3435,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true and \`jobs\` option <= 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not enable parallel mode",
@@ -3446,7 +3446,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true and \`jobs\` option > 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should enable parallel mode",
@@ -3457,7 +3457,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true when \`enableGlobalSetup\` option is present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should toggle global setup fixtures",
@@ -3468,7 +3468,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true when \`enableGlobalTeardown\` option is present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure global teardown fixtures",
@@ -3479,7 +3479,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true when \`globalSetup\` option is present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure global setup fixtures",
@@ -3490,7 +3490,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`parallel\` option is true when \`globalTeardown\` option is present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure global teardown fixtures",
@@ -3501,7 +3501,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`retries\` option is not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not attempt to set retries",
@@ -3512,7 +3512,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`retries\` option is present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to set retries\`",
@@ -3523,7 +3523,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`rootHooks\` option is truthy",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "shouid attempt to set root hooks",
@@ -3534,7 +3534,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`timeout\` option is \`false\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to set timeout",
@@ -3545,7 +3545,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha constructor when \`timeout\` option is \`undefined\`",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not attempt to set timeout",
@@ -3556,7 +3556,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method _runGlobalFixtures()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should execute multiple fixtures in order",
@@ -3567,7 +3567,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method allowUncaught()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3590,7 +3590,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method asyncOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3613,7 +3613,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method bail()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3624,10 +3624,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method bail() when provided a falsy argument",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should unset the \\"bail\\" flag on the root suite",
+ "name": "should unset the "bail" flag on the root suite",
"result": "success",
"time": 1,
},
@@ -3635,10 +3635,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method bail() when provided no arguments",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should set the \\"bail\\" flag on the root suite",
+ "name": "should set the "bail" flag on the root suite",
"result": "success",
"time": 1,
},
@@ -3646,7 +3646,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method checkLeaks()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the checkLeaks option to true",
@@ -3657,7 +3657,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method cleanReferencesAfterRun()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3680,7 +3680,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method color()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3703,7 +3703,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method delay()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3720,7 +3720,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method diff()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the diff option to true",
@@ -3731,7 +3731,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method diff() when provided \`false\` argument",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the diff option to false",
@@ -3742,7 +3742,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method dispose()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should dispose previous test runner",
@@ -3765,7 +3765,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method forbidOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3788,7 +3788,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method forbidPending()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3811,7 +3811,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method fullTrace()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3834,7 +3834,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method global()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be an empty array initially",
@@ -3851,7 +3851,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method global() when argument is invalid",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not modify the whitelist when given empty array",
@@ -3868,7 +3868,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method global() when argument is valid",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add contents of string array to the whitelist",
@@ -3891,7 +3891,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method growl()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3902,7 +3902,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method growl() if capable of notifications",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the growl option to true",
@@ -3913,7 +3913,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method growl() if not capable of notifications",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the growl option to false",
@@ -3924,7 +3924,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method hasGlobalSetupFixtures() when no global setup fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`false\`",
@@ -3935,7 +3935,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method hasGlobalSetupFixtures() when one or more global setup fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`true\`",
@@ -3946,7 +3946,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method hasGlobalTeardownFixtures() when no global teardown fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`false\`",
@@ -3957,7 +3957,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method hasGlobalTeardownFixtures() when one or more global teardown fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`true\`",
@@ -3968,7 +3968,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method inlineDiffs()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -3991,7 +3991,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method invert()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -4008,7 +4008,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method noHighlighting()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -4025,7 +4025,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method parallelMode() when \`Mocha\` is running in a browser",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4036,7 +4036,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method reporter()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -4059,7 +4059,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method rootHooks()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be chainable",
@@ -4069,8 +4069,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided a single \\"after all\\" hook",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided a single "after all" hook",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach it to the root suite",
@@ -4080,8 +4080,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided a single \\"after each\\" hook",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided a single "after each" hook",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach it to the root suite",
@@ -4091,8 +4091,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided a single \\"before all\\" hook",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided a single "before all" hook",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach it to the root suite",
@@ -4102,8 +4102,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided a single \\"before each\\" hook",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided a single "before each" hook",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach it to the root suite",
@@ -4113,8 +4113,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided multiple \\"after all\\" hooks",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided multiple "after all" hooks",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach each to the root suite",
@@ -4124,8 +4124,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided multiple \\"after each\\" hooks",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided multiple "after each" hooks",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach each to the root suite",
@@ -4135,8 +4135,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided multiple \\"before all\\" hooks",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided multiple "before all" hooks",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach each to the root suite",
@@ -4146,8 +4146,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method rootHooks() when provided multiple \\"before each\\" hooks",
- "tests": Array [
+ "name": "Mocha instance method rootHooks() when provided multiple "before each" hooks",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attach each to the root suite",
@@ -4158,7 +4158,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should execute the callback when complete",
@@ -4181,7 +4181,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() Base reporter initialization",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure the Base reporter",
@@ -4191,8 +4191,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method run() Base reporter initialization when \\"color\\" options is set",
- "tests": Array [
+ "name": "Mocha instance method run() Base reporter initialization when "color" options is set",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure the Base reporter",
@@ -4203,7 +4203,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() Runner initialization",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should instantiate a Runner",
@@ -4213,8 +4213,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method run() Runner initialization when \\"global\\" option is present",
- "tests": Array [
+ "name": "Mocha instance method run() Runner initialization when "global" option is present",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should configure global vars",
@@ -4224,19 +4224,19 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method run() Runner initialization when \\"grep\\" option is present",
- "tests": Array [
+ "name": "Mocha instance method run() Runner initialization when "grep" option is present",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should configure \\"grep\\"",
+ "name": "should configure "grep"",
"result": "success",
"time": 1,
},
],
},
TestGroupResult {
- "name": "Mocha instance method run() when \\"growl\\" option is present",
- "tests": Array [
+ "name": "Mocha instance method run() when "growl" option is present",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should initialize growl support",
@@ -4246,11 +4246,11 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "Mocha instance method run() when a reporter instance has a \\"done\\" method",
- "tests": Array [
+ "name": "Mocha instance method run() when a reporter instance has a "done" method",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should call the reporter \\"done\\" method",
+ "name": "should call the reporter "done" method",
"result": "success",
"time": 1,
},
@@ -4258,7 +4258,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when a run has finished and is called again",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not call \`Runner#runAsync()\`",
@@ -4275,7 +4275,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when a run is in progress",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not call \`Runner#runAsync\`",
@@ -4292,7 +4292,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when files have been added to the Mocha instance when Mocha is set to eagerly load files",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should eagerly load files",
@@ -4303,7 +4303,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when files have been added to the Mocha instance when Mocha is set to lazily load files",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not eagerly load files",
@@ -4314,7 +4314,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global setup fixtures disabled when global setup fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global setup fixtures",
@@ -4325,7 +4325,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global setup fixtures disabled when global setup fixtures not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global setup fixtures",
@@ -4336,7 +4336,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global setup fixtures enabled when global setup fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should run global setup fixtures",
@@ -4347,7 +4347,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global setup fixtures enabled when global setup fixtures not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global setup fixtures",
@@ -4358,7 +4358,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global teardown fixtures disabled when global teardown fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global teardown fixtures",
@@ -4369,7 +4369,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global teardown fixtures disabled when global teardown fixtures not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global teardown fixtures",
@@ -4380,7 +4380,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global teardown fixtures enabled when global teardown fixtures are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should run global teardown fixtures",
@@ -4391,7 +4391,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global teardown fixtures enabled when global teardown fixtures are present when global setup fixtures are present and enabled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should use the same context as returned by \`runGlobalSetup\`",
@@ -4402,7 +4402,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when global teardown fixtures enabled when global teardown fixtures not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not run global teardown fixtures",
@@ -4413,7 +4413,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when Mocha configured for multiple runs and multiple runs are attempted",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call \`Runner#runAsync\` for each call",
@@ -4442,7 +4442,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method run() when the \`Mocha\` instance is already disposed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not call \`Runner#runAsync\`",
@@ -4459,7 +4459,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method runGlobalSetup() when a fixture is not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not attempt to run fixtures",
@@ -4470,7 +4470,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method runGlobalSetup() when fixture(s) are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt run the fixtures",
@@ -4481,7 +4481,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method runGlobalTeardown() when a fixture is not present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "not attempt to run the fixtures",
@@ -4492,7 +4492,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method runGlobalTeardown() when fixture(s) are present",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to run the fixtures",
@@ -4503,7 +4503,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Mocha instance method unloadFile() when run in a browser",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4517,10 +4517,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "overspecified asynchronous resolution method",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail when multiple methods are used",
@@ -4534,10 +4534,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "parseQuery()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should get queryString and return key-value object",
@@ -4546,7 +4546,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should parse \\"+\\" as a space",
+ "name": "should parse "+" as a space",
"result": "success",
"time": 0,
},
@@ -4557,10 +4557,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "plugin module class PluginLoader constructor when passed custom plugins",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should register the custom plugins",
@@ -4571,7 +4571,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader constructor when passed ignored plugins",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should retain a list of ignored plugins",
@@ -4582,7 +4582,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader constructor when passed no options",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should populate a registry of built-in plugins",
@@ -4592,8 +4592,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "plugin module class PluginLoader instance method finalize() when a plugin has no \\"finalize\\" function",
- "tests": Array [
+ "name": "plugin module class PluginLoader instance method finalize() when a plugin has no "finalize" function",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an array of raw implementations",
@@ -4604,7 +4604,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method finalize() when a plugin has one or more implementations",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should omit unused plugins",
@@ -4621,7 +4621,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method finalize() when no plugins have been loaded",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an empty map",
@@ -4632,7 +4632,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when called with a falsy value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -4643,7 +4643,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when called with an object containing a recognized plugin",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call the associated validator, if present",
@@ -4666,7 +4666,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when called with an object containing no recognized plugin",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -4677,7 +4677,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when passed a falsy or non-object value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not call a validator",
@@ -4694,7 +4694,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when passed an object value when a key matches a known named export",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call the associated validator",
@@ -4711,7 +4711,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when passed an object value when a key matches a known named export when the value does not pass the associated validator",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4722,7 +4722,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when passed an object value when a key matches a known named export when the value passes the associated validator",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add the implementation to the internal mapping",
@@ -4745,7 +4745,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method load() when passed an object value when no keys match any known named exports",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -4756,7 +4756,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when passed a definition w/o an exportName",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4767,7 +4767,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when passed a falsy parameter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4778,7 +4778,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when passed a non-object parameter",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4789,7 +4789,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when the plugin export name is already in use",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -4800,7 +4800,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when the plugin export name is ignored",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not register the plugin",
@@ -4817,7 +4817,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader instance method register() when the plugin export name is not in use",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not throw",
@@ -4828,7 +4828,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module class PluginLoader static method create()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a PluginLoader instance",
@@ -4839,7 +4839,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global setup when an implementation is a function",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4850,7 +4850,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global setup when an implementation is a primitive",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4861,7 +4861,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global setup when an implementation is an array of functions",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4872,7 +4872,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global setup when an implementation is an array of primitives",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4883,7 +4883,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global teardown when an implementation is a function",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4894,7 +4894,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global teardown when an implementation is a primitive",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4905,7 +4905,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global teardown when an implementation is an array of functions",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4916,7 +4916,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module global fixtures plugin global teardown when an implementation is an array of primitives",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4927,7 +4927,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module root hooks plugin 🎣 when a loaded impl is finalized",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should flatten the implementations",
@@ -4938,7 +4938,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module root hooks plugin 🎣 when impl is a function",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4949,7 +4949,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module root hooks plugin 🎣 when impl is a primitive",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4960,7 +4960,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module root hooks plugin 🎣 when impl is an array",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail validation",
@@ -4971,7 +4971,7 @@ TestRunResult {
},
TestGroupResult {
"name": "plugin module root hooks plugin 🎣 when impl is an object of functions",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass validation",
@@ -4985,10 +4985,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "using imported describe",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "using imported it",
@@ -5002,10 +5002,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "root",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be a valid suite",
@@ -5019,10 +5019,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) if async",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "this.skip() should halt synchronous execution",
@@ -5039,7 +5039,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) if timed-out",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should ignore call to \`done\` and not execute callback again",
@@ -5050,7 +5050,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when .pending",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not invoke the callback",
@@ -5061,7 +5061,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should allow a timeout of 0",
@@ -5078,7 +5078,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when an error is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5089,7 +5089,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when an exception is thrown",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5106,7 +5106,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when an exception is thrown and is allowed to remain uncaught",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when it is allowed",
@@ -5117,7 +5117,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when done() is invoked with a non-Error object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5128,7 +5128,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when done() is invoked with a string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5139,10 +5139,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when the callback is invoked several times with an error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should emit a single \\"error\\" event",
+ "name": "should emit a single "error" event",
"result": "success",
"time": 1,
},
@@ -5150,10 +5150,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async when the callback is invoked several times without an error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should emit a single \\"error\\" event",
+ "name": "should emit a single "error" event",
"result": "success",
"time": 1,
},
@@ -5161,7 +5161,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when async without error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5172,7 +5172,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn is not a function",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw an error",
@@ -5183,7 +5183,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a non-promise",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5194,7 +5194,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a promise when the promise is fulfilled with a value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5205,7 +5205,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a promise when the promise is fulfilled with no value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5216,7 +5216,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a promise when the promise is rejected",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5227,7 +5227,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a promise when the promise is rejected without a reason",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5238,7 +5238,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when fn returns a promise when the promise takes too long to settle",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw the timeout error",
@@ -5249,7 +5249,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when sync when an exception is thrown",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback with error",
@@ -5260,7 +5260,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when sync when an exception is thrown and is allowed to remain uncaught",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "throws an error when it is allowed",
@@ -5271,7 +5271,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when sync without error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should invoke the callback",
@@ -5282,7 +5282,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .run(fn) when timeouts are disabled",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not error with timeout",
@@ -5293,7 +5293,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .title",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be present",
@@ -5304,7 +5304,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) .titlePath()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the concatenation of the parent's title path and runnable's title",
@@ -5315,7 +5315,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #globals",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should allow for whitelisting globals",
@@ -5326,7 +5326,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #isFailed()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return \`false\` if test is pending",
@@ -5349,7 +5349,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #reset",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should reset current run state",
@@ -5360,7 +5360,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #resetTimeout()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not time out if timeouts disabled after reset",
@@ -5371,7 +5371,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #retries(n)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the number of retries",
@@ -5382,7 +5382,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #slow(ms)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not set the slow threshold if the parameter is not passed",
@@ -5405,7 +5405,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #slow(ms) when passed a time-formatted string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should convert to ms",
@@ -5416,7 +5416,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is equal to lower bound given numeric value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the timeout value to disabled",
@@ -5427,7 +5427,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is equal to lower bound given string timestamp",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the timeout value to disabled",
@@ -5438,7 +5438,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is equal to upper bound given numeric value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the disabled timeout value",
@@ -5449,7 +5449,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is less than lower bound",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should clamp to lower bound given numeric",
@@ -5466,7 +5466,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is out-of-bounds given numeric value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the disabled timeout value",
@@ -5477,7 +5477,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is within \`setTimeout\` bounds given numeric value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the timeout value",
@@ -5488,7 +5488,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) #timeout(ms) when value is within \`setTimeout\` bounds given string timestamp",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should set the timeout value",
@@ -5499,7 +5499,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) interesting property id",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should have a permanent identifier",
@@ -5516,7 +5516,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) static method toValueOrError",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an Error if parameter is falsy",
@@ -5533,7 +5533,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) when arity == 0",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be .sync",
@@ -5550,7 +5550,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runnable(title, fn) when arity >= 1",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be .async",
@@ -5570,10 +5570,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Runner instance method _uncaught() when called with a non-Runner context",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -5584,7 +5584,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method abort()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Runner",
@@ -5601,7 +5601,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method allowUncaught()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "async - should allow unhandled errors in hooks to propagate through",
@@ -5630,7 +5630,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method checkGlobals(test)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should allow variables that match a wildcard",
@@ -5639,19 +5639,19 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should emit \\"fail\\" when a global beginning with \\"d\\" is introduced",
+ "name": "should emit "fail" when a global beginning with "d" is introduced",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
- "name": "should emit \\"fail\\" when a new global is introduced",
+ "name": "should emit "fail" when a new global is introduced",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "should emit \\"fail\\" when a single new disallowed global is introduced after a single extra global is allowed",
+ "name": "should emit "fail" when a single new disallowed global is introduced after a single extra global is allowed",
"result": "success",
"time": 0,
},
@@ -5683,16 +5683,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method dispose()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should remove \\"error\\" listeners from a test",
+ "name": "should remove "error" listeners from a test",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "should remove \\"uncaughtException\\" listeners from the process",
+ "name": "should remove "uncaughtException" listeners from the process",
"result": "success",
"time": 0,
},
@@ -5706,16 +5706,16 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method fail()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should emit \\"fail\\"",
+ "name": "should emit "fail"",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
- "name": "should emit \\"fail\\"",
+ "name": "should emit "fail"",
"result": "success",
"time": 0,
},
@@ -5763,7 +5763,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should not emit \\"end\\" if suite bail is not true",
+ "name": "should not emit "end" if suite bail is not true",
"result": "success",
"time": 1,
},
@@ -5781,29 +5781,29 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should set \`Test#state\` to \\"failed\\"",
+ "name": "should set \`Test#state\` to "failed"",
"result": "success",
"time": 0,
},
],
},
TestGroupResult {
- "name": "Runner instance method fail() when Runner has stopped when test is not pending when error is not of the \\"multiple done\\" variety",
- "tests": Array [
+ "name": "Runner instance method fail() when Runner has stopped when test is not pending when error is not of the "multiple done" variety",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw a \\"fatal\\" error",
+ "name": "should throw a "fatal" error",
"result": "success",
"time": 1,
},
],
},
TestGroupResult {
- "name": "Runner instance method fail() when Runner has stopped when test is not pending when error is the \\"multiple done\\" variety",
- "tests": Array [
+ "name": "Runner instance method fail() when Runner has stopped when test is not pending when error is the "multiple done" variety",
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should throw the \\"multiple done\\" error",
+ "name": "should throw the "multiple done" error",
"result": "success",
"time": 1,
},
@@ -5811,7 +5811,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method globalProps()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should include common non enumerable globals",
@@ -5822,7 +5822,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method globals()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should default to the known globals",
@@ -5839,7 +5839,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method grep()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should update the runner.total with number of matched tests",
@@ -5856,7 +5856,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method grepTotal()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the total number of matched tests",
@@ -5873,7 +5873,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method hook()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should augment hook title with current test title",
@@ -5890,7 +5890,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method isParallelMode()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false",
@@ -5901,7 +5901,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method linkPartialObjects()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Runner",
@@ -5912,7 +5912,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method run()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should clean references after a run",
@@ -5921,7 +5921,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should emit \\"retry\\" when a retryable test fails",
+ "name": "should emit "retry" when a retryable test fails",
"result": "success",
"time": 1,
},
@@ -5947,7 +5947,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method run() stack traces ginormous",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not hang if overlong error message is multiple lines",
@@ -5964,7 +5964,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method run() stack traces long",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should display the full stack-trace",
@@ -5975,7 +5975,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method run() stack traces short",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should prettify the stack-trace",
@@ -5986,7 +5986,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method runAsync()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should pass through options to Runner#run",
@@ -6003,7 +6003,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method runTest()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return when no tests to run",
@@ -6014,7 +6014,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when allow-uncaught is set to true",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should propagate error and throw",
@@ -6025,7 +6025,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is a Pending",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should ignore argument and return",
@@ -6036,10 +6036,10 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
- "name": "should add the \\"uncaught\\" property to the Error",
+ "name": "should add the "uncaught" property to the Error",
"result": "success",
"time": 1,
},
@@ -6047,7 +6047,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should clear any pending timeouts",
@@ -6058,7 +6058,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run when current Runnable has already failed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not attempt to fail again",
@@ -6069,7 +6069,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run when current Runnable has been marked pending",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should attempt to fail",
@@ -6080,7 +6080,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run when the current Runnable has already passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should abort the runner without emitting end event",
@@ -6097,7 +6097,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run when the current Runnable is still running when the current Runnable is a Hook",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not notify run has ended",
@@ -6120,7 +6120,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when a Runnable is running or has run when the current Runnable is still running when the current Runnable is a Test",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not notify run has ended",
@@ -6143,7 +6143,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when no Runnables are running",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail with a transient Runnable and the error",
@@ -6154,7 +6154,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when no Runnables are running when Runner is IDLE",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should emit start/end events for the benefit of reporters",
@@ -6165,7 +6165,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when no Runnables are running when Runner is RUNNING",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not emit start/end events",
@@ -6176,7 +6176,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is an Error when no Runnables are running when Runner is STOPPED",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not emit start/end events, since this presumably would have already happened",
@@ -6193,7 +6193,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method uncaught() when provided an object argument when argument is not an Error",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should fail with a transient Runnable and a new Error coerced from the object",
@@ -6204,7 +6204,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Runner instance method workerReporter()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -6218,10 +6218,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Suite instance method addSuite()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds the suite to the suites collection",
@@ -6256,7 +6256,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method addTest()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds the test to the tests collection",
@@ -6279,7 +6279,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method afterAll() wraps the passed in function in a Hook",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds it to _afterAll",
@@ -6302,7 +6302,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method afterEach() wraps the passed in function in a Hook",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds it to _afterEach",
@@ -6325,7 +6325,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method bail() when argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Suite object",
@@ -6336,7 +6336,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method bail() when no argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the bail value",
@@ -6347,7 +6347,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method beforeAll() wraps the passed in function in a Hook",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds it to _beforeAll",
@@ -6370,7 +6370,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method beforeEach() when the suite is pending",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not create a hook",
@@ -6381,7 +6381,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method beforeEach() wraps the passed in function in a Hook",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "adds it to _beforeEach",
@@ -6404,7 +6404,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method clone()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should clone the Suite, omitting children",
@@ -6415,7 +6415,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method constructor",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not throw if the title is a string",
@@ -6438,7 +6438,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method create()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "does not create a second root suite",
@@ -6455,7 +6455,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method eachTest(fn) when there are no nested suites or tests",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return 0",
@@ -6466,7 +6466,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method eachTest(fn) when there are several levels of nested suites",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the number",
@@ -6477,7 +6477,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method eachTest(fn) when there are several tests in the suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the number",
@@ -6488,7 +6488,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method filterOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should filter out all other tests and suites if a suite has \`only\`",
@@ -6505,7 +6505,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method fullTitle() when there is a parent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the combination of parent's and suite's title",
@@ -6516,7 +6516,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method fullTitle() when there is no parent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the suite title",
@@ -6527,7 +6527,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method hasOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false if no suite or test is marked \`only\`",
@@ -6556,7 +6556,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method markOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call appendOnlySuite on parent",
@@ -6567,7 +6567,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method reset()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should forward reset to all hooks",
@@ -6590,7 +6590,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method slow() when argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Suite object",
@@ -6601,7 +6601,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method slow() when given a string",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should parse it",
@@ -6612,7 +6612,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method slow() when no argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the slow value",
@@ -6623,7 +6623,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method timeout()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should convert a string to milliseconds",
@@ -6634,7 +6634,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method timeout() when argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the Suite object",
@@ -6645,7 +6645,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method timeout() when no argument is passed",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the timeout value",
@@ -6656,7 +6656,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method titlePath() when there is a parent the parent is not the root suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the concatenation of parent's and suite's title",
@@ -6667,7 +6667,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method titlePath() when there is a parent the parent is the root suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the suite title",
@@ -6678,7 +6678,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method titlePath() when there is no parent",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "returns the suite title",
@@ -6689,7 +6689,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method total() when there are no nested suites or tests",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return 0",
@@ -6700,7 +6700,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Suite instance method total() when there are several tests in the suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return the number",
@@ -6711,7 +6711,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Test initialization",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not throw if the title is a string",
@@ -6731,10 +6731,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "Test .clone()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add/keep the retriedTest value",
@@ -6793,7 +6793,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Test .isPending()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should be pending when its parent is pending",
@@ -6816,7 +6816,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Test .markOnly()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call appendOnlyTest on parent",
@@ -6827,7 +6827,7 @@ TestRunResult {
},
TestGroupResult {
"name": "Test .reset()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should call Runnable.reset",
@@ -6847,10 +6847,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "a test that throws non-extensible",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not pass if throwing async and test is async",
@@ -6873,7 +6873,7 @@ TestRunResult {
},
TestGroupResult {
"name": "a test that throws null",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not pass if throwing async and test is async",
@@ -6896,7 +6896,7 @@ TestRunResult {
},
TestGroupResult {
"name": "a test that throws undefined",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should not pass if throwing async and test is async",
@@ -6922,10 +6922,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "timeouts",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should allow overriding per-test",
@@ -6942,7 +6942,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work with timeout(0)",
@@ -6953,7 +6953,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling suite-level",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work with timeout(0)",
@@ -6964,7 +6964,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling suite-level nested suite",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work with timeout(0)",
@@ -6975,7 +6975,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling using before",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work with timeout(0)",
@@ -6986,7 +6986,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling using beforeEach",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should work with timeout(0)",
@@ -6997,7 +6997,7 @@ TestRunResult {
},
TestGroupResult {
"name": "timeouts disabling using timeout(0)",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should suppress timeout(4)",
@@ -7011,10 +7011,10 @@ TestRunResult {
"totalTime": undefined,
},
TestSuiteResult {
- "groups": Array [
+ "groups": [
TestGroupResult {
"name": "lib/utils canonicalType()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should recognize various types",
@@ -7025,7 +7025,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils canonicalType() when toString on null or undefined stringifies window",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should recognize null and undefined",
@@ -7036,7 +7036,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils castArray() when provided a primitive value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an array containing the primitive value only",
@@ -7046,8 +7046,8 @@ TestRunResult {
],
},
TestGroupResult {
- "name": "lib/utils castArray() when provided an \\"arguments\\" value",
- "tests": Array [
+ "name": "lib/utils castArray() when provided an "arguments" value",
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an array containing the arguments",
@@ -7058,7 +7058,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils castArray() when provided an array value",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a copy of the array",
@@ -7069,7 +7069,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils castArray() when provided an object",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an array containing the object only",
@@ -7080,7 +7080,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils castArray() when provided no parameters",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an empty array",
@@ -7091,7 +7091,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils castArray() when provided null",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return an array containing a null value only",
@@ -7102,7 +7102,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils clean()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should format a multi line test indented with spaces",
@@ -7209,7 +7209,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils createMap()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should add props from all object parameters to the object",
@@ -7232,7 +7232,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils dQuote()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return its input as string wrapped in double quotes",
@@ -7243,7 +7243,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils escape()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "replaces invalid xml characters",
@@ -7260,7 +7260,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils isPromise()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return false if the object is null",
@@ -7269,7 +7269,7 @@ TestRunResult {
},
TestCaseResult {
"error": undefined,
- "name": "should return false if the value is an object w/o a \\"then\\" function",
+ "name": "should return false if the value is an object w/o a "then" function",
"result": "success",
"time": 0,
},
@@ -7289,7 +7289,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils lookupFiles() when run in browser",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should throw",
@@ -7300,7 +7300,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils lookupFiles() when run in Node.js",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should delegate to new location of lookupFiles()",
@@ -7317,7 +7317,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils slug()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should convert the string to lowercase",
@@ -7346,7 +7346,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils sQuote()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return its input as string wrapped in single quotes",
@@ -7357,7 +7357,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils stringify()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "might get confusing",
@@ -7506,7 +7506,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils stringify() #Number",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "floats and ints",
@@ -7535,7 +7535,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils stringify() canonicalize example",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should represent the actual full result",
@@ -7546,7 +7546,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils type()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should recognize various types",
@@ -7557,7 +7557,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils type() when toString on null or undefined stringifies window",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should recognize null and undefined",
@@ -7568,7 +7568,7 @@ TestRunResult {
},
TestGroupResult {
"name": "lib/utils uniqueID()",
- "tests": Array [
+ "tests": [
TestCaseResult {
"error": undefined,
"name": "should return a non-empty string",
diff --git a/__tests__/__snapshots__/swift-xunit.test.ts.snap b/__tests__/__snapshots__/swift-xunit.test.ts.snap
new file mode 100644
index 0000000..01aa7d9
--- /dev/null
+++ b/__tests__/__snapshots__/swift-xunit.test.ts.snap
@@ -0,0 +1,44 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
+TestRunResult {
+ "path": "fixtures/swift-xunit.xml",
+ "suites": [
+ TestSuiteResult {
+ "groups": [
+ TestGroupResult {
+ "name": "AcmeLibTests.AcmeLibTests",
+ "tests": [
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_pass",
+ "result": "success",
+ "time": 36.386333,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_skip",
+ "result": "success",
+ "time": 92.039167,
+ },
+ TestCaseResult {
+ "error": {
+ "details": undefined,
+ "line": undefined,
+ "message": undefined,
+ "path": undefined,
+ },
+ "name": "test_always_fail",
+ "result": "failed",
+ "time": 92.05175,
+ },
+ ],
+ },
+ ],
+ "name": "TestResults",
+ "totalTime": 220.47725000000003,
+ },
+ ],
+ "totalTime": undefined,
+}
+`;
diff --git a/__tests__/fixtures/external/jest/jest-react-component-test-results.xml b/__tests__/fixtures/external/jest/jest-react-component-test-results.xml
new file mode 100644
index 0000000..cdaa87f
--- /dev/null
+++ b/__tests__/fixtures/external/jest/jest-react-component-test-results.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
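The XML body of this new fixture is not reproduced above. Based on the `jest-junit` escaping change later in this diff (suite names containing `<` and `>` now get escaped) and the accompanying test for issue #235, a report for a React component test would look roughly like the sketch below; the component and test names are purely illustrative, not the literal fixture contents:

```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Illustrative sketch only: suite/test names are hypothetical, not the real fixture -->
<testsuites name="jest tests" tests="1" failures="0" time="0.5">
  <testsuite name="&lt;MyComponent /&gt;" tests="1" failures="0" errors="0" time="0.5">
    <testcase classname="&lt;MyComponent /&gt; renders without crashing"
              name="&lt;MyComponent /&gt; renders without crashing" time="0.1"/>
  </testsuite>
</testsuites>
```

Once parsed, the suite name decodes to `<MyComponent />`, which is exactly the kind of value the new `escapeCharacters` step in `JestJunitParser` is there to neutralize.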
diff --git a/__tests__/fixtures/swift-xunit.xml b/__tests__/fixtures/swift-xunit.xml
new file mode 100644
index 0000000..1046225
--- /dev/null
+++ b/__tests__/fixtures/swift-xunit.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
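The body of `swift-xunit.xml` is likewise not shown above. A sketch of what a `swift test` xUnit report matching the new snapshot would contain, assuming the parser reads times in seconds and scales them to milliseconds (as `JestJunitParser` does with `parseFloat(ts.$.time) * 1000`); the attribute layout and the bare `<failure/>` element are assumptions reconstructed from the snapshot, not the literal fixture:

```xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Reconstructed sketch based on the snapshot above; attribute details are assumed -->
<testsuites>
  <testsuite name="TestResults" tests="3" failures="1" errors="0" time="0.220477250">
    <testcase classname="AcmeLibTests.AcmeLibTests" name="test_always_pass" time="0.036386333"/>
    <testcase classname="AcmeLibTests.AcmeLibTests" name="test_always_skip" time="0.092039167"/>
    <testcase classname="AcmeLibTests.AcmeLibTests" name="test_always_fail" time="0.092051750">
      <failure/>
    </testcase>
  </testsuite>
</testsuites>
```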
diff --git a/__tests__/jest-junit.test.ts b/__tests__/jest-junit.test.ts
index b6533e6..d42405c 100644
--- a/__tests__/jest-junit.test.ts
+++ b/__tests__/jest-junit.test.ts
@@ -82,4 +82,27 @@ describe('jest-junit tests', () => {
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
+
+ it('report from #235 testing react components named ', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'external', 'jest', 'jest-react-component-test-results.xml')
+ const trackedFilesPath = path.join(__dirname, 'fixtures', 'external', 'jest', 'files.txt')
+ const outputPath = path.join(__dirname, '__outputs__', 'jest-react-component-test-results.md')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const trackedFiles = fs.readFileSync(trackedFilesPath, {encoding: 'utf8'}).split(/\n\r?/g)
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles
+ //workDir: '/home/dorny/dorny/jest/'
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ expect(result).toMatchSnapshot()
+
+ const report = getReport([result])
+ fs.mkdirSync(path.dirname(outputPath), {recursive: true})
+ fs.writeFileSync(outputPath, report)
+ })
})
diff --git a/__tests__/swift-xunit.test.ts b/__tests__/swift-xunit.test.ts
new file mode 100644
index 0000000..474c868
--- /dev/null
+++ b/__tests__/swift-xunit.test.ts
@@ -0,0 +1,30 @@
+import * as fs from 'fs'
+import * as path from 'path'
+
+import {SwiftXunitParser} from '../src/parsers/swift-xunit/swift-xunit-parser'
+import {ParseOptions} from '../src/test-parser'
+import {getReport} from '../src/report/get-report'
+import {normalizeFilePath} from '../src/utils/path-utils'
+
+describe('swift-xunit tests', () => {
+ it('report from swift test results matches snapshot', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'swift-xunit.xml')
+ const outputPath = path.join(__dirname, '__outputs__', 'swift-xunit.md')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const trackedFiles = ['Package.swift', 'Sources/AcmeLib/AcmeLib.swift', 'Tests/AcmeLibTests/AcmeLibTests.swift']
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles
+ }
+
+ const parser = new SwiftXunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ expect(result).toMatchSnapshot()
+
+ const report = getReport([result])
+ fs.mkdirSync(path.dirname(outputPath), {recursive: true})
+ fs.writeFileSync(outputPath, report)
+ })
+})
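For reference, the `SwiftXunitParser` imported here is a thin subclass of `JavaJunitParser` (Swift's xUnit output is JUnit-shaped XML). Judging from the compiled module added to `dist/index.js` further down, the source is essentially the following; the exact relative import paths are assumptions:

```ts
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'

// Swift's xUnit output is JUnit-compatible XML, so the parser simply reuses JavaJunitParser
export class SwiftXunitParser extends JavaJunitParser {
  constructor(readonly options: ParseOptions) {
    super(options)
  }
}
```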
diff --git a/action.yml b/action.yml
index ee0b64b..6f35ebf 100644
--- a/action.yml
+++ b/action.yml
@@ -31,6 +31,7 @@ inputs:
- java-junit
- jest-junit
- mocha-json
+ - swift-xunit
required: true
list-suites:
description: |
@@ -94,7 +95,7 @@ outputs:
url_html:
description: Check run URL HTML
runs:
- using: 'node16'
+ using: 'node20'
main: 'dist/index.js'
branding:
color: blue
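With `swift-xunit` added to the reporter enum, it can be selected like any other format. A minimal workflow step, assuming the action's existing `name`/`path` inputs and a SwiftPM toolchain that supports `--xunit-output`; the file path is an example only:

```yaml
# Hypothetical usage sketch: the test step and file path are examples, not part of this PR
- name: Swift tests
  run: swift test --parallel --xunit-output TestResults.xml

- name: Test report
  uses: dorny/test-reporter@v1
  if: success() || failure()
  with:
    name: Swift tests
    path: TestResults.xml
    reporter: swift-xunit
```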
diff --git a/dist/index.js b/dist/index.js
index 6246d8d..47a1b8d 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -265,9 +265,9 @@ const dotnet_trx_parser_1 = __nccwpck_require__(2664);
const java_junit_parser_1 = __nccwpck_require__(676);
const jest_junit_parser_1 = __nccwpck_require__(1113);
const mocha_json_parser_1 = __nccwpck_require__(6043);
+const swift_xunit_parser_1 = __nccwpck_require__(5366);
const path_utils_1 = __nccwpck_require__(4070);
const github_utils_1 = __nccwpck_require__(3522);
-const markdown_utils_1 = __nccwpck_require__(6482);
function main() {
return __awaiter(this, void 0, void 0, function* () {
try {
@@ -402,10 +402,13 @@ class TestReporter {
const annotations = (0, get_annotations_1.getAnnotations)(results, this.maxAnnotations);
const isFailed = this.failOnError && results.some(tr => tr.result === 'failed');
const conclusion = isFailed ? 'failure' : 'success';
- const icon = isFailed ? markdown_utils_1.Icon.fail : markdown_utils_1.Icon.success;
+ const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
+ const failed = results.reduce((sum, tr) => sum + tr.failed, 0);
+ const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
+ const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `;
core.info(`Updating check run conclusion (${conclusion}) and output`);
const resp = yield this.octokit.rest.checks.update(Object.assign({ check_run_id: createResp.data.id, conclusion, status: 'completed', output: {
- title: `${name} ${icon}`,
+ title: shortSummary,
summary,
annotations
} }, github.context.repo));
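The check-run title now reports aggregate counts instead of the run name followed by an icon. A small sketch of the string this produces, using made-up sample counts in place of the real `TestRunResult` objects:

```ts
// Hypothetical sample data; in the action these counters come from the parsed TestRunResults
interface RunCounts { passed: number; failed: number; skipped: number }
const results: RunCounts[] = [
  {passed: 10, failed: 0, skipped: 1},
  {passed: 2, failed: 1, skipped: 1}
]

// Mirrors the reduce() calls added above
const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0)
const title = `${passed} passed, ${failed} failed and ${skipped} skipped`
// title === "12 passed, 1 failed and 2 skipped", used as the check run's output.title
```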
@@ -431,6 +434,8 @@ class TestReporter {
return new jest_junit_parser_1.JestJunitParser(options);
case 'mocha-json':
return new mocha_json_parser_1.MochaJsonParser(options);
+ case 'swift-xunit':
+ return new swift_xunit_parser_1.SwiftXunitParser(options);
default:
throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);
}
@@ -1209,7 +1214,7 @@ class JestJunitParser {
const suites = junit.testsuites.testsuite === undefined
? []
: junit.testsuites.testsuite.map(ts => {
- const name = ts.$.name.trim();
+ const name = this.escapeCharacters(ts.$.name.trim());
const time = parseFloat(ts.$.time) * 1000;
const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);
return sr;
@@ -1278,6 +1283,9 @@ class JestJunitParser {
var _a, _b;
return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = (0, path_utils_1.getBasePath)(path, this.options.trackedFiles)));
}
+ escapeCharacters(s) {
+ return s.replace(/([<>])/g, '\\$1');
+ }
}
exports.JestJunitParser = JestJunitParser;
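The new `escapeCharacters` step backslash-escapes angle brackets so suite names that look like HTML tags (for example React component names) survive Markdown rendering in the check summary. A self-contained check of what the regex produces, with a hypothetical suite name:

```ts
// Same regex as escapeCharacters() above, applied to a hypothetical suite name
const escapeCharacters = (s: string): string => s.replace(/([<>])/g, '\\$1')

console.log(escapeCharacters('<MyComponent /> renders'))
// -> "\<MyComponent /\> renders": Markdown shows the literal brackets instead of treating them as a tag
```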
@@ -1395,6 +1403,25 @@ class MochaJsonParser {
exports.MochaJsonParser = MochaJsonParser;
+/***/ }),
+
+/***/ 5366:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SwiftXunitParser = void 0;
+const java_junit_parser_1 = __nccwpck_require__(676);
+class SwiftXunitParser extends java_junit_parser_1.JavaJunitParser {
+ constructor(options) {
+ super(options);
+ this.options = options;
+ }
+}
+exports.SwiftXunitParser = SwiftXunitParser;
+
+
/***/ }),
/***/ 5867:
@@ -2092,10 +2119,10 @@ var Align;
Align["Center"] = ":---:";
Align["Right"] = "---:";
Align["None"] = "---";
-})(Align = exports.Align || (exports.Align = {}));
+})(Align || (exports.Align = Align = {}));
exports.Icon = {
- skip: '⚪',
- success: '✅',
+ skip: '⚪', // ':white_circle:'
+ success: '✅', // ':white_check_mark:'
fail: '❌' // ':x:'
};
function link(title, address) {
@@ -4022,8 +4049,8 @@ class Context {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
- if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
- this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
}
else {
const path = process.env.GITHUB_EVENT_PATH;
@@ -4041,7 +4068,8 @@ class Context {
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
- this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+ this.graphqlUrl =
+ (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
}
get issue() {
const payload = this.payload;
@@ -4073,7 +4101,11 @@ exports.Context = Context;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -4086,7 +4118,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -4103,7 +4135,7 @@ exports.context = new Context.Context();
*/
function getOctokit(token, options, ...additionalPlugins) {
const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
- return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
+ return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
@@ -4117,7 +4149,11 @@ exports.getOctokit = getOctokit;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -4130,13 +4166,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
+exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6255));
+const undici_1 = __nccwpck_require__(1773);
function getAuthString(token, options) {
if (!token && !options.auth) {
throw new Error('Parameter token or opts.auth is required');
@@ -4152,6 +4198,19 @@ function getProxyAgent(destinationUrl) {
return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
+function getProxyAgentDispatcher(destinationUrl) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgentDispatcher(destinationUrl);
+}
+exports.getProxyAgentDispatcher = getProxyAgentDispatcher;
+function getProxyFetch(destinationUrl) {
+ const httpDispatcher = getProxyAgentDispatcher(destinationUrl);
+ const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {
+ return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));
+ });
+ return proxyFetch;
+}
+exports.getProxyFetch = getProxyFetch;
function getApiBaseUrl() {
return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
@@ -4167,7 +4226,11 @@ exports.getApiBaseUrl = getApiBaseUrl;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -4180,7 +4243,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -4197,7 +4260,8 @@ const baseUrl = Utils.getApiBaseUrl();
exports.defaults = {
baseUrl,
request: {
- agent: Utils.getProxyAgent(baseUrl)
+ agent: Utils.getProxyAgent(baseUrl),
+ fetch: Utils.getProxyFetch(baseUrl)
}
};
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
@@ -4317,7 +4381,11 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -4330,7 +4398,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -4349,6 +4417,7 @@ const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
+const undici_1 = __nccwpck_require__(1773);
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -4378,16 +4447,16 @@ var HttpCodes;
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
+})(Headers || (exports.Headers = Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -4756,6 +4825,15 @@ class HttpClient {
const parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
+ getAgentDispatcher(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ const proxyUrl = pm.getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (!useProxy) {
+ return;
+ }
+ return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+ }
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
@@ -4855,6 +4933,30 @@ class HttpClient {
}
return agent;
}
+ _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+ let proxyAgent;
+ if (this._keepAlive) {
+ proxyAgent = this._proxyAgentDispatcher;
+ }
+ // if agent is already assigned use that agent.
+ if (proxyAgent) {
+ return proxyAgent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+ token: `${proxyUrl.username}:${proxyUrl.password}`
+ })));
+ this._proxyAgentDispatcher = proxyAgent;
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return proxyAgent;
+ }
_performExponentialBackoff(retryNumber) {
return __awaiter(this, void 0, void 0, function* () {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
@@ -6439,16 +6541,39 @@ exports["default"] = Settings;
/***/ }),
/***/ 334:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ ((module) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ createTokenAuth: () => createTokenAuth
+});
+module.exports = __toCommonJS(dist_src_exports);
-const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
-const REGEX_IS_INSTALLATION = /^ghs_/;
-const REGEX_IS_USER_TO_SERVER = /^ghu_/;
+// pkg/dist-src/auth.js
+var REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
+var REGEX_IS_INSTALLATION = /^ghs_/;
+var REGEX_IS_USER_TO_SERVER = /^ghu_/;
async function auth(token) {
const isApp = token.split(/\./).length === 3;
const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
@@ -6456,109 +6581,144 @@ async function auth(token) {
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
return {
type: "token",
- token: token,
+ token,
tokenType
};
}
-/**
- * Prefix token for usage in the Authorization header
- *
- * @param token OAuth token or JSON Web Token
- */
+// pkg/dist-src/with-authorization-prefix.js
function withAuthorizationPrefix(token) {
if (token.split(/\./).length === 3) {
return `bearer ${token}`;
}
-
return `token ${token}`;
}
+// pkg/dist-src/hook.js
async function hook(token, request, route, parameters) {
- const endpoint = request.endpoint.merge(route, parameters);
+ const endpoint = request.endpoint.merge(
+ route,
+ parameters
+ );
endpoint.headers.authorization = withAuthorizationPrefix(token);
return request(endpoint);
}
-const createTokenAuth = function createTokenAuth(token) {
+// pkg/dist-src/index.js
+var createTokenAuth = function createTokenAuth2(token) {
if (!token) {
throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
}
-
if (typeof token !== "string") {
- throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
+ throw new Error(
+ "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
+ );
}
-
token = token.replace(/^(token|bearer) +/i, "");
return Object.assign(auth.bind(null, token), {
hook: hook.bind(null, token)
});
};
-
-exports.createTokenAuth = createTokenAuth;
-//# sourceMappingURL=index.js.map
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 6762:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-var universalUserAgent = __nccwpck_require__(5030);
-var beforeAfterHook = __nccwpck_require__(3682);
-var request = __nccwpck_require__(6234);
-var graphql = __nccwpck_require__(8467);
-var authToken = __nccwpck_require__(334);
-
-function _objectWithoutPropertiesLoose(source, excluded) {
- if (source == null) return {};
- var target = {};
- var sourceKeys = Object.keys(source);
- var key, i;
-
- for (i = 0; i < sourceKeys.length; i++) {
- key = sourceKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- target[key] = source[key];
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- return target;
-}
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ Octokit: () => Octokit
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_universal_user_agent = __nccwpck_require__(5030);
+var import_before_after_hook = __nccwpck_require__(3682);
+var import_request = __nccwpck_require__(6234);
+var import_graphql = __nccwpck_require__(8467);
+var import_auth_token = __nccwpck_require__(334);
-function _objectWithoutProperties(source, excluded) {
- if (source == null) return {};
+// pkg/dist-src/version.js
+var VERSION = "5.0.2";
- var target = _objectWithoutPropertiesLoose(source, excluded);
-
- var key, i;
-
- if (Object.getOwnPropertySymbols) {
- var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
-
- for (i = 0; i < sourceSymbolKeys.length; i++) {
- key = sourceSymbolKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
- target[key] = source[key];
- }
+// pkg/dist-src/index.js
+var noop = () => {
+};
+var consoleWarn = console.warn.bind(console);
+var consoleError = console.error.bind(console);
+var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
+var Octokit = class {
+ static {
+ this.VERSION = VERSION;
+ }
+ static defaults(defaults) {
+ const OctokitWithDefaults = class extends this {
+ constructor(...args) {
+ const options = args[0] || {};
+ if (typeof defaults === "function") {
+ super(defaults(options));
+ return;
+ }
+ super(
+ Object.assign(
+ {},
+ defaults,
+ options,
+ options.userAgent && defaults.userAgent ? {
+ userAgent: `${options.userAgent} ${defaults.userAgent}`
+ } : null
+ )
+ );
+ }
+ };
+ return OctokitWithDefaults;
+ }
+ static {
+ this.plugins = [];
+ }
+ /**
+ * Attach a plugin (or many) to your Octokit instance.
+ *
+ * @example
+ * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+ */
+ static plugin(...newPlugins) {
+ const currentPlugins = this.plugins;
+ const NewOctokit = class extends this {
+ static {
+ this.plugins = currentPlugins.concat(
+ newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
+ );
+ }
+ };
+ return NewOctokit;
}
-
- return target;
-}
-
-const VERSION = "3.6.0";
-
-const _excluded = ["authStrategy"];
-class Octokit {
constructor(options = {}) {
- const hook = new beforeAfterHook.Collection();
+ const hook = new import_before_after_hook.Collection();
const requestDefaults = {
- baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
+ baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,
headers: {},
request: Object.assign({}, options.request, {
// @ts-ignore internal usage only, no need to type
@@ -6568,321 +6728,283 @@ class Octokit {
previews: [],
format: ""
}
- }; // prepend default user agent with `options.userAgent` if set
-
- requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
-
+ };
+ requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail;
if (options.baseUrl) {
requestDefaults.baseUrl = options.baseUrl;
}
-
if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
}
-
if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
}
-
- this.request = request.request.defaults(requestDefaults);
- this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
- this.log = Object.assign({
- debug: () => {},
- info: () => {},
- warn: console.warn.bind(console),
- error: console.error.bind(console)
- }, options.log);
- this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
- // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
- // (2) If only `options.auth` is set, use the default token authentication strategy.
- // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
- // TODO: type `options.auth` based on `options.authStrategy`.
-
+ this.request = import_request.request.defaults(requestDefaults);
+ this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);
+ this.log = Object.assign(
+ {
+ debug: noop,
+ info: noop,
+ warn: consoleWarn,
+ error: consoleError
+ },
+ options.log
+ );
+ this.hook = hook;
if (!options.authStrategy) {
if (!options.auth) {
- // (1)
this.auth = async () => ({
type: "unauthenticated"
});
} else {
- // (2)
- const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
-
+ const auth = (0, import_auth_token.createTokenAuth)(options.auth);
hook.wrap("request", auth.hook);
this.auth = auth;
}
} else {
- const {
- authStrategy
- } = options,
- otherOptions = _objectWithoutProperties(options, _excluded);
-
- const auth = authStrategy(Object.assign({
- request: this.request,
- log: this.log,
- // we pass the current octokit instance as well as its constructor options
- // to allow for authentication strategies that return a new octokit instance
- // that shares the same internal state as the current one. The original
- // requirement for this was the "event-octokit" authentication strategy
- // of https://github.com/probot/octokit-auth-probot.
- octokit: this,
- octokitOptions: otherOptions
- }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
-
+ const { authStrategy, ...otherOptions } = options;
+ const auth = authStrategy(
+ Object.assign(
+ {
+ request: this.request,
+ log: this.log,
+ // we pass the current octokit instance as well as its constructor options
+ // to allow for authentication strategies that return a new octokit instance
+ // that shares the same internal state as the current one. The original
+ // requirement for this was the "event-octokit" authentication strategy
+ // of https://github.com/probot/octokit-auth-probot.
+ octokit: this,
+ octokitOptions: otherOptions
+ },
+ options.auth
+ )
+ );
hook.wrap("request", auth.hook);
this.auth = auth;
- } // apply plugins
- // https://stackoverflow.com/a/16345172
-
-
+ }
const classConstructor = this.constructor;
- classConstructor.plugins.forEach(plugin => {
- Object.assign(this, plugin(this, options));
- });
+ for (let i = 0; i < classConstructor.plugins.length; ++i) {
+ Object.assign(this, classConstructor.plugins[i](this, options));
+ }
}
-
- static defaults(defaults) {
- const OctokitWithDefaults = class extends this {
- constructor(...args) {
- const options = args[0] || {};
-
- if (typeof defaults === "function") {
- super(defaults(options));
- return;
- }
-
- super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
- userAgent: `${options.userAgent} ${defaults.userAgent}`
- } : null));
- }
-
- };
- return OctokitWithDefaults;
- }
- /**
- * Attach a plugin (or many) to your Octokit instance.
- *
- * @example
- * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
- */
-
-
- static plugin(...newPlugins) {
- var _a;
-
- const currentPlugins = this.plugins;
- const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
- return NewOctokit;
- }
-
-}
-Octokit.VERSION = VERSION;
-Octokit.plugins = [];
-
-exports.Octokit = Octokit;
-//# sourceMappingURL=index.js.map
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 9440:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ endpoint: () => endpoint
+});
+module.exports = __toCommonJS(dist_src_exports);
-var isPlainObject = __nccwpck_require__(3287);
-var universalUserAgent = __nccwpck_require__(5030);
+// pkg/dist-src/defaults.js
+var import_universal_user_agent = __nccwpck_require__(5030);
+// pkg/dist-src/version.js
+var VERSION = "9.0.4";
+
+// pkg/dist-src/defaults.js
+var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
+var DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: ""
+ }
+};
+
+// pkg/dist-src/util/lowercase-keys.js
function lowercaseKeys(object) {
if (!object) {
return {};
}
-
return Object.keys(object).reduce((newObj, key) => {
newObj[key.toLowerCase()] = object[key];
return newObj;
}, {});
}
+// pkg/dist-src/util/is-plain-object.js
+function isPlainObject(value) {
+ if (typeof value !== "object" || value === null)
+ return false;
+ if (Object.prototype.toString.call(value) !== "[object Object]")
+ return false;
+ const proto = Object.getPrototypeOf(value);
+ if (proto === null)
+ return true;
+ const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
+ return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
+
+// pkg/dist-src/util/merge-deep.js
function mergeDeep(defaults, options) {
const result = Object.assign({}, defaults);
- Object.keys(options).forEach(key => {
- if (isPlainObject.isPlainObject(options[key])) {
- if (!(key in defaults)) Object.assign(result, {
- [key]: options[key]
- });else result[key] = mergeDeep(defaults[key], options[key]);
+ Object.keys(options).forEach((key) => {
+ if (isPlainObject(options[key])) {
+ if (!(key in defaults))
+ Object.assign(result, { [key]: options[key] });
+ else
+ result[key] = mergeDeep(defaults[key], options[key]);
} else {
- Object.assign(result, {
- [key]: options[key]
- });
+ Object.assign(result, { [key]: options[key] });
}
});
return result;
}
+// pkg/dist-src/util/remove-undefined-properties.js
function removeUndefinedProperties(obj) {
for (const key in obj) {
- if (obj[key] === undefined) {
+ if (obj[key] === void 0) {
delete obj[key];
}
}
-
return obj;
}
+// pkg/dist-src/merge.js
function merge(defaults, route, options) {
if (typeof route === "string") {
let [method, url] = route.split(" ");
- options = Object.assign(url ? {
- method,
- url
- } : {
- url: method
- }, options);
+ options = Object.assign(url ? { method, url } : { url: method }, options);
} else {
options = Object.assign({}, route);
- } // lowercase header names before merging with defaults to avoid duplicates
-
-
- options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
-
+ }
+ options.headers = lowercaseKeys(options.headers);
removeUndefinedProperties(options);
removeUndefinedProperties(options.headers);
- const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
-
- if (defaults && defaults.mediaType.previews.length) {
- mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
+ const mergedOptions = mergeDeep(defaults || {}, options);
+ if (options.url === "/graphql") {
+ if (defaults && defaults.mediaType.previews?.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(
+ (preview) => !mergedOptions.mediaType.previews.includes(preview)
+ ).concat(mergedOptions.mediaType.previews);
+ }
+ mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
}
-
- mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
return mergedOptions;
}
+// pkg/dist-src/util/add-query-parameters.js
function addQueryParameters(url, parameters) {
const separator = /\?/.test(url) ? "&" : "?";
const names = Object.keys(parameters);
-
if (names.length === 0) {
return url;
}
-
- return url + separator + names.map(name => {
+ return url + separator + names.map((name) => {
if (name === "q") {
return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
}
-
return `${name}=${encodeURIComponent(parameters[name])}`;
}).join("&");
}
-const urlVariableRegex = /\{[^}]+\}/g;
-
+// pkg/dist-src/util/extract-url-variable-names.js
+var urlVariableRegex = /\{[^}]+\}/g;
function removeNonChars(variableName) {
return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}
-
function extractUrlVariableNames(url) {
const matches = url.match(urlVariableRegex);
-
if (!matches) {
return [];
}
-
return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
}
+// pkg/dist-src/util/omit.js
function omit(object, keysToOmit) {
- return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
- obj[key] = object[key];
- return obj;
- }, {});
+ const result = { __proto__: null };
+ for (const key of Object.keys(object)) {
+ if (keysToOmit.indexOf(key) === -1) {
+ result[key] = object[key];
+ }
+ }
+ return result;
}
-// Based on https://github.com/bramstein/url-template, licensed under BSD
-// TODO: create separate package.
-//
-// Copyright (c) 2012-2014, Bram Stein
-// All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-// 1. Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// 2. Redistributions in binary form must reproduce the above copyright
-// notice, this list of conditions and the following disclaimer in the
-// documentation and/or other materials provided with the distribution.
-// 3. The name of the author may not be used to endorse or promote products
-// derived from this software without specific prior written permission.
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
-// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
-// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/* istanbul ignore file */
+// pkg/dist-src/util/url-template.js
function encodeReserved(str) {
- return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {
if (!/%[0-9A-Fa-f]/.test(part)) {
part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
}
-
return part;
}).join("");
}
-
function encodeUnreserved(str) {
- return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
return "%" + c.charCodeAt(0).toString(16).toUpperCase();
});
}
-
function encodeValue(operator, value, key) {
value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
-
if (key) {
return encodeUnreserved(key) + "=" + value;
} else {
return value;
}
}
-
function isDefined(value) {
- return value !== undefined && value !== null;
+ return value !== void 0 && value !== null;
}
-
function isKeyOperator(operator) {
return operator === ";" || operator === "&" || operator === "?";
}
-
function getValues(context, operator, key, modifier) {
- var value = context[key],
- result = [];
-
+ var value = context[key], result = [];
if (isDefined(value) && value !== "") {
if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
value = value.toString();
-
if (modifier && modifier !== "*") {
value = value.substring(0, parseInt(modifier, 10));
}
-
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ result.push(
+ encodeValue(operator, value, isKeyOperator(operator) ? key : "")
+ );
} else {
if (modifier === "*") {
if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ value.filter(isDefined).forEach(function(value2) {
+ result.push(
+ encodeValue(operator, value2, isKeyOperator(operator) ? key : "")
+ );
});
} else {
- Object.keys(value).forEach(function (k) {
+ Object.keys(value).forEach(function(k) {
if (isDefined(value[k])) {
result.push(encodeValue(operator, value[k], k));
}
@@ -6890,20 +7012,18 @@ function getValues(context, operator, key, modifier) {
}
} else {
const tmp = [];
-
if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- tmp.push(encodeValue(operator, value));
+ value.filter(isDefined).forEach(function(value2) {
+ tmp.push(encodeValue(operator, value2));
});
} else {
- Object.keys(value).forEach(function (k) {
+ Object.keys(value).forEach(function(k) {
if (isDefined(value[k])) {
tmp.push(encodeUnreserved(k));
tmp.push(encodeValue(operator, value[k].toString()));
}
});
}
-
if (isKeyOperator(operator)) {
result.push(encodeUnreserved(key) + "=" + tmp.join(","));
} else if (tmp.length !== 0) {
@@ -6922,89 +7042,93 @@ function getValues(context, operator, key, modifier) {
result.push("");
}
}
-
return result;
}
-
function parseUrl(template) {
return {
expand: expand.bind(null, template)
};
}
-
function expand(template, context) {
var operators = ["+", "#", ".", "/", ";", "?", "&"];
- return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
- if (expression) {
- let operator = "";
- const values = [];
-
- if (operators.indexOf(expression.charAt(0)) !== -1) {
- operator = expression.charAt(0);
- expression = expression.substr(1);
- }
-
- expression.split(/,/g).forEach(function (variable) {
- var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
- values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
- });
-
- if (operator && operator !== "+") {
- var separator = ",";
-
- if (operator === "?") {
- separator = "&";
- } else if (operator !== "#") {
- separator = operator;
+ template = template.replace(
+ /\{([^\{\}]+)\}|([^\{\}]+)/g,
+ function(_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+ expression.split(/,/g).forEach(function(variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+ if (operator && operator !== "+") {
+ var separator = ",";
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
}
-
- return (values.length !== 0 ? operator : "") + values.join(separator);
} else {
- return values.join(",");
+ return encodeReserved(literal);
}
- } else {
- return encodeReserved(literal);
}
- });
+ );
+ if (template === "/") {
+ return template;
+ } else {
+ return template.replace(/\/$/, "");
+ }
}
+// pkg/dist-src/parse.js
function parse(options) {
- // https://fetch.spec.whatwg.org/#methods
- let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
-
+ let method = options.method.toUpperCase();
let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
let headers = Object.assign({}, options.headers);
let body;
- let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
-
+ let parameters = omit(options, [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "mediaType"
+ ]);
const urlVariableNames = extractUrlVariableNames(url);
url = parseUrl(url).expand(parameters);
-
if (!/^http/.test(url)) {
url = options.baseUrl + url;
}
-
- const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
+ const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
const remainingParameters = omit(parameters, omittedParameters);
const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
-
if (!isBinaryRequest) {
if (options.mediaType.format) {
- // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
- headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ headers.accept = headers.accept.split(/,/).map(
+ (format) => format.replace(
+ /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
+ `application/vnd$1$2.${options.mediaType.format}`
+ )
+ ).join(",");
}
-
- if (options.mediaType.previews.length) {
- const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
- headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
- const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
- return `application/vnd.github.${preview}-preview${format}`;
- }).join(",");
+ if (url.endsWith("/graphql")) {
+ if (options.mediaType.previews?.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
}
- } // for GET/HEAD requests, set URL query parameters from remaining parameters
- // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
-
-
+ }
if (["GET", "HEAD"].includes(method)) {
url = addQueryParameters(url, remainingParameters);
} else {
@@ -7013,183 +7137,189 @@ function parse(options) {
} else {
if (Object.keys(remainingParameters).length) {
body = remainingParameters;
- } else {
- headers["content-length"] = 0;
}
}
- } // default content-type for JSON if body is set
-
-
+ }
if (!headers["content-type"] && typeof body !== "undefined") {
headers["content-type"] = "application/json; charset=utf-8";
- } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
- // fetch does not allow to set `content-length` header, but we can set body to an empty string
-
-
+ }
if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
body = "";
- } // Only return body/request keys if present
-
-
- return Object.assign({
- method,
- url,
- headers
- }, typeof body !== "undefined" ? {
- body
- } : null, options.request ? {
- request: options.request
- } : null);
+ }
+ return Object.assign(
+ { method, url, headers },
+ typeof body !== "undefined" ? { body } : null,
+ options.request ? { request: options.request } : null
+ );
}
+// pkg/dist-src/endpoint-with-defaults.js
function endpointWithDefaults(defaults, route, options) {
return parse(merge(defaults, route, options));
}
+// pkg/dist-src/with-defaults.js
function withDefaults(oldDefaults, newDefaults) {
- const DEFAULTS = merge(oldDefaults, newDefaults);
- const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
- return Object.assign(endpoint, {
- DEFAULTS,
- defaults: withDefaults.bind(null, DEFAULTS),
- merge: merge.bind(null, DEFAULTS),
+ const DEFAULTS2 = merge(oldDefaults, newDefaults);
+ const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);
+ return Object.assign(endpoint2, {
+ DEFAULTS: DEFAULTS2,
+ defaults: withDefaults.bind(null, DEFAULTS2),
+ merge: merge.bind(null, DEFAULTS2),
parse
});
}
-const VERSION = "6.0.12";
-
-const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
-// So we use RequestParameters and add method as additional required property.
-
-const DEFAULTS = {
- method: "GET",
- baseUrl: "https://api.github.com",
- headers: {
- accept: "application/vnd.github.v3+json",
- "user-agent": userAgent
- },
- mediaType: {
- format: "",
- previews: []
- }
-};
-
-const endpoint = withDefaults(null, DEFAULTS);
-
-exports.endpoint = endpoint;
-//# sourceMappingURL=index.js.map
+// pkg/dist-src/index.js
+var endpoint = withDefaults(null, DEFAULTS);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 8467:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ GraphqlResponseError: () => GraphqlResponseError,
+ graphql: () => graphql2,
+ withCustomRequest: () => withCustomRequest
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_request3 = __nccwpck_require__(6234);
+var import_universal_user_agent = __nccwpck_require__(5030);
-var request = __nccwpck_require__(6234);
-var universalUserAgent = __nccwpck_require__(5030);
+// pkg/dist-src/version.js
+var VERSION = "7.0.2";
-const VERSION = "4.8.0";
+// pkg/dist-src/with-defaults.js
+var import_request2 = __nccwpck_require__(6234);
+// pkg/dist-src/graphql.js
+var import_request = __nccwpck_require__(6234);
+
+// pkg/dist-src/error.js
function _buildMessageForResponseErrors(data) {
- return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n");
+ return `Request failed due to following response errors:
+` + data.errors.map((e) => ` - ${e.message}`).join("\n");
}
-
-class GraphqlResponseError extends Error {
- constructor(request, headers, response) {
+var GraphqlResponseError = class extends Error {
+ constructor(request2, headers, response) {
super(_buildMessageForResponseErrors(response));
- this.request = request;
+ this.request = request2;
this.headers = headers;
this.response = response;
- this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties.
-
+ this.name = "GraphqlResponseError";
this.errors = response.errors;
- this.data = response.data; // Maintains proper stack trace (only available on V8)
-
- /* istanbul ignore next */
-
+ this.data = response.data;
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
}
+};
-}
-
-const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
-const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
-const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
-function graphql(request, query, options) {
+// pkg/dist-src/graphql.js
+var NON_VARIABLE_OPTIONS = [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "query",
+ "mediaType"
+];
+var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
+var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
+function graphql(request2, query, options) {
if (options) {
if (typeof query === "string" && "query" in options) {
- return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
+ return Promise.reject(
+ new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
+ );
}
-
for (const key in options) {
- if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
- return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
+ if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))
+ continue;
+ return Promise.reject(
+ new Error(
+ `[@octokit/graphql] "${key}" cannot be used as variable name`
+ )
+ );
}
}
-
- const parsedOptions = typeof query === "string" ? Object.assign({
- query
- }, options) : query;
- const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
+ const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
+ const requestOptions = Object.keys(
+ parsedOptions
+ ).reduce((result, key) => {
if (NON_VARIABLE_OPTIONS.includes(key)) {
result[key] = parsedOptions[key];
return result;
}
-
if (!result.variables) {
result.variables = {};
}
-
result.variables[key] = parsedOptions[key];
return result;
- }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
- // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
-
- const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
-
+ }, {});
+ const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
}
-
- return request(requestOptions).then(response => {
+ return request2(requestOptions).then((response) => {
if (response.data.errors) {
const headers = {};
-
for (const key of Object.keys(response.headers)) {
headers[key] = response.headers[key];
}
-
- throw new GraphqlResponseError(requestOptions, headers, response.data);
+ throw new GraphqlResponseError(
+ requestOptions,
+ headers,
+ response.data
+ );
}
-
return response.data.data;
});
}
-function withDefaults(request$1, newDefaults) {
- const newRequest = request$1.defaults(newDefaults);
-
+// pkg/dist-src/with-defaults.js
+function withDefaults(request2, newDefaults) {
+ const newRequest = request2.defaults(newDefaults);
const newApi = (query, options) => {
return graphql(newRequest, query, options);
};
-
return Object.assign(newApi, {
defaults: withDefaults.bind(null, newRequest),
- endpoint: request.request.endpoint
+ endpoint: newRequest.endpoint
});
}
-const graphql$1 = withDefaults(request.request, {
+// pkg/dist-src/index.js
+var graphql2 = withDefaults(import_request3.request, {
headers: {
- "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
},
method: "POST",
url: "/graphql"
@@ -7200,94 +7330,59 @@ function withCustomRequest(customRequest) {
url: "/graphql"
});
}
-
-exports.GraphqlResponseError = GraphqlResponseError;
-exports.graphql = graphql$1;
-exports.withCustomRequest = withCustomRequest;
-//# sourceMappingURL=index.js.map
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 4193:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ ((module) => {
"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-const VERSION = "2.21.3";
-
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
-
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
- enumerableOnly && (symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- })), keys.push.apply(keys, symbols);
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- return keys;
-}
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ composePaginateRest: () => composePaginateRest,
+ isPaginatingEndpoint: () => isPaginatingEndpoint,
+ paginateRest: () => paginateRest,
+ paginatingEndpoints: () => paginatingEndpoints
+});
+module.exports = __toCommonJS(dist_src_exports);
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = null != arguments[i] ? arguments[i] : {};
- i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
- }
+// pkg/dist-src/version.js
+var VERSION = "9.1.5";
- return target;
-}
-
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
- } else {
- obj[key] = value;
- }
-
- return obj;
-}
-
-/**
- * Some “list” response that can be paginated have a different response structure
- *
- * They have a `total_count` key in the response (search also has `incomplete_results`,
- * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
- *
- * Octokit normalizes these responses so that paginated results are always returned following
- * the same structure. One challenge is that if the list response has only one page, no Link
- * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
- */
+// pkg/dist-src/normalize-paginated-list-response.js
function normalizePaginatedListResponse(response) {
- // endpoints can respond with 204 if repository is empty
if (!response.data) {
- return _objectSpread2(_objectSpread2({}, response), {}, {
+ return {
+ ...response,
data: []
- });
+ };
}
-
const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
- if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
- // to retrieve the same information.
-
+ if (!responseNeedsNormalization)
+ return response;
const incompleteResults = response.data.incomplete_results;
const repositorySelection = response.data.repository_selection;
const totalCount = response.data.total_count;
@@ -7297,19 +7392,17 @@ function normalizePaginatedListResponse(response) {
const namespaceKey = Object.keys(response.data)[0];
const data = response.data[namespaceKey];
response.data = data;
-
if (typeof incompleteResults !== "undefined") {
response.data.incomplete_results = incompleteResults;
}
-
if (typeof repositorySelection !== "undefined") {
response.data.repository_selection = repositorySelection;
}
-
response.data.total_count = totalCount;
return response;
}
+// pkg/dist-src/iterator.js
function iterator(octokit, route, parameters) {
const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
const requestMethod = typeof route === "function" ? route : octokit.request;
@@ -7319,26 +7412,18 @@ function iterator(octokit, route, parameters) {
return {
[Symbol.asyncIterator]: () => ({
async next() {
- if (!url) return {
- done: true
- };
-
+ if (!url)
+ return { done: true };
try {
- const response = await requestMethod({
- method,
- url,
- headers
- });
- const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
- // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=7>; rel="last"'
- // sets `url` to undefined if "next" URL is not present or `link` header is not set
-
- url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
- return {
- value: normalizedResponse
- };
+ const response = await requestMethod({ method, url, headers });
+ const normalizedResponse = normalizePaginatedListResponse(response);
+ url = ((normalizedResponse.headers.link || "").match(
+ /<([^>]+)>;\s*rel="next"/
+ ) || [])[1];
+ return { value: normalizedResponse };
} catch (error) {
- if (error.status !== 409) throw error;
+ if (error.status !== 409)
+ throw error;
url = "";
return {
value: {
@@ -7349,48 +7434,284 @@ function iterator(octokit, route, parameters) {
};
}
}
-
})
};
}
+// pkg/dist-src/paginate.js
function paginate(octokit, route, parameters, mapFn) {
if (typeof parameters === "function") {
mapFn = parameters;
- parameters = undefined;
+ parameters = void 0;
}
-
- return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+ return gather(
+ octokit,
+ [],
+ iterator(octokit, route, parameters)[Symbol.asyncIterator](),
+ mapFn
+ );
}
-
-function gather(octokit, results, iterator, mapFn) {
- return iterator.next().then(result => {
+function gather(octokit, results, iterator2, mapFn) {
+ return iterator2.next().then((result) => {
if (result.done) {
return results;
}
-
let earlyExit = false;
-
function done() {
earlyExit = true;
}
-
- results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
-
+ results = results.concat(
+ mapFn ? mapFn(result.value, done) : result.value.data
+ );
if (earlyExit) {
return results;
}
-
- return gather(octokit, results, iterator, mapFn);
+ return gather(octokit, results, iterator2, mapFn);
});
}
-const composePaginateRest = Object.assign(paginate, {
+// pkg/dist-src/compose-paginate.js
+var composePaginateRest = Object.assign(paginate, {
iterator
});
-const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
+// pkg/dist-src/generated/paginating-endpoints.js
+var paginatingEndpoints = [
+ "GET /advisories",
+ "GET /app/hook/deliveries",
+ "GET /app/installation-requests",
+ "GET /app/installations",
+ "GET /assignments/{assignment_id}/accepted_assignments",
+ "GET /classrooms",
+ "GET /classrooms/{classroom_id}/assignments",
+ "GET /enterprises/{enterprise}/dependabot/alerts",
+ "GET /enterprises/{enterprise}/secret-scanning/alerts",
+ "GET /events",
+ "GET /gists",
+ "GET /gists/public",
+ "GET /gists/starred",
+ "GET /gists/{gist_id}/comments",
+ "GET /gists/{gist_id}/commits",
+ "GET /gists/{gist_id}/forks",
+ "GET /installation/repositories",
+ "GET /issues",
+ "GET /licenses",
+ "GET /marketplace_listing/plans",
+ "GET /marketplace_listing/plans/{plan_id}/accounts",
+ "GET /marketplace_listing/stubbed/plans",
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts",
+ "GET /networks/{owner}/{repo}/events",
+ "GET /notifications",
+ "GET /organizations",
+ "GET /orgs/{org}/actions/cache/usage-by-repository",
+ "GET /orgs/{org}/actions/permissions/repositories",
+ "GET /orgs/{org}/actions/runners",
+ "GET /orgs/{org}/actions/secrets",
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/actions/variables",
+ "GET /orgs/{org}/actions/variables/{name}/repositories",
+ "GET /orgs/{org}/blocks",
+ "GET /orgs/{org}/code-scanning/alerts",
+ "GET /orgs/{org}/codespaces",
+ "GET /orgs/{org}/codespaces/secrets",
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/copilot/billing/seats",
+ "GET /orgs/{org}/dependabot/alerts",
+ "GET /orgs/{org}/dependabot/secrets",
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/events",
+ "GET /orgs/{org}/failed_invitations",
+ "GET /orgs/{org}/hooks",
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries",
+ "GET /orgs/{org}/installations",
+ "GET /orgs/{org}/invitations",
+ "GET /orgs/{org}/invitations/{invitation_id}/teams",
+ "GET /orgs/{org}/issues",
+ "GET /orgs/{org}/members",
+ "GET /orgs/{org}/members/{username}/codespaces",
+ "GET /orgs/{org}/migrations",
+ "GET /orgs/{org}/migrations/{migration_id}/repositories",
+ "GET /orgs/{org}/outside_collaborators",
+ "GET /orgs/{org}/packages",
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ "GET /orgs/{org}/personal-access-token-requests",
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories",
+ "GET /orgs/{org}/personal-access-tokens",
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories",
+ "GET /orgs/{org}/projects",
+ "GET /orgs/{org}/properties/values",
+ "GET /orgs/{org}/public_members",
+ "GET /orgs/{org}/repos",
+ "GET /orgs/{org}/rulesets",
+ "GET /orgs/{org}/rulesets/rule-suites",
+ "GET /orgs/{org}/secret-scanning/alerts",
+ "GET /orgs/{org}/security-advisories",
+ "GET /orgs/{org}/teams",
+ "GET /orgs/{org}/teams/{team_slug}/discussions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/invitations",
+ "GET /orgs/{org}/teams/{team_slug}/members",
+ "GET /orgs/{org}/teams/{team_slug}/projects",
+ "GET /orgs/{org}/teams/{team_slug}/repos",
+ "GET /orgs/{org}/teams/{team_slug}/teams",
+ "GET /projects/columns/{column_id}/cards",
+ "GET /projects/{project_id}/collaborators",
+ "GET /projects/{project_id}/columns",
+ "GET /repos/{owner}/{repo}/actions/artifacts",
+ "GET /repos/{owner}/{repo}/actions/caches",
+ "GET /repos/{owner}/{repo}/actions/organization-secrets",
+ "GET /repos/{owner}/{repo}/actions/organization-variables",
+ "GET /repos/{owner}/{repo}/actions/runners",
+ "GET /repos/{owner}/{repo}/actions/runs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
+ "GET /repos/{owner}/{repo}/actions/secrets",
+ "GET /repos/{owner}/{repo}/actions/variables",
+ "GET /repos/{owner}/{repo}/actions/workflows",
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs",
+ "GET /repos/{owner}/{repo}/activity",
+ "GET /repos/{owner}/{repo}/assignees",
+ "GET /repos/{owner}/{repo}/branches",
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations",
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ "GET /repos/{owner}/{repo}/code-scanning/analyses",
+ "GET /repos/{owner}/{repo}/codespaces",
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers",
+ "GET /repos/{owner}/{repo}/codespaces/secrets",
+ "GET /repos/{owner}/{repo}/collaborators",
+ "GET /repos/{owner}/{repo}/comments",
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/commits",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites",
+ "GET /repos/{owner}/{repo}/commits/{ref}/status",
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses",
+ "GET /repos/{owner}/{repo}/contributors",
+ "GET /repos/{owner}/{repo}/dependabot/alerts",
+ "GET /repos/{owner}/{repo}/dependabot/secrets",
+ "GET /repos/{owner}/{repo}/deployments",
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
+ "GET /repos/{owner}/{repo}/environments",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps",
+ "GET /repos/{owner}/{repo}/events",
+ "GET /repos/{owner}/{repo}/forks",
+ "GET /repos/{owner}/{repo}/hooks",
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries",
+ "GET /repos/{owner}/{repo}/invitations",
+ "GET /repos/{owner}/{repo}/issues",
+ "GET /repos/{owner}/{repo}/issues/comments",
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/issues/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline",
+ "GET /repos/{owner}/{repo}/keys",
+ "GET /repos/{owner}/{repo}/labels",
+ "GET /repos/{owner}/{repo}/milestones",
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels",
+ "GET /repos/{owner}/{repo}/notifications",
+ "GET /repos/{owner}/{repo}/pages/builds",
+ "GET /repos/{owner}/{repo}/projects",
+ "GET /repos/{owner}/{repo}/pulls",
+ "GET /repos/{owner}/{repo}/pulls/comments",
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments",
+ "GET /repos/{owner}/{repo}/releases",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions",
+ "GET /repos/{owner}/{repo}/rules/branches/{branch}",
+ "GET /repos/{owner}/{repo}/rulesets",
+ "GET /repos/{owner}/{repo}/rulesets/rule-suites",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations",
+ "GET /repos/{owner}/{repo}/security-advisories",
+ "GET /repos/{owner}/{repo}/stargazers",
+ "GET /repos/{owner}/{repo}/subscribers",
+ "GET /repos/{owner}/{repo}/tags",
+ "GET /repos/{owner}/{repo}/teams",
+ "GET /repos/{owner}/{repo}/topics",
+ "GET /repositories",
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets",
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables",
+ "GET /search/code",
+ "GET /search/commits",
+ "GET /search/issues",
+ "GET /search/labels",
+ "GET /search/repositories",
+ "GET /search/topics",
+ "GET /search/users",
+ "GET /teams/{team_id}/discussions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/reactions",
+ "GET /teams/{team_id}/invitations",
+ "GET /teams/{team_id}/members",
+ "GET /teams/{team_id}/projects",
+ "GET /teams/{team_id}/repos",
+ "GET /teams/{team_id}/teams",
+ "GET /user/blocks",
+ "GET /user/codespaces",
+ "GET /user/codespaces/secrets",
+ "GET /user/emails",
+ "GET /user/followers",
+ "GET /user/following",
+ "GET /user/gpg_keys",
+ "GET /user/installations",
+ "GET /user/installations/{installation_id}/repositories",
+ "GET /user/issues",
+ "GET /user/keys",
+ "GET /user/marketplace_purchases",
+ "GET /user/marketplace_purchases/stubbed",
+ "GET /user/memberships/orgs",
+ "GET /user/migrations",
+ "GET /user/migrations/{migration_id}/repositories",
+ "GET /user/orgs",
+ "GET /user/packages",
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ "GET /user/public_emails",
+ "GET /user/repos",
+ "GET /user/repository_invitations",
+ "GET /user/social_accounts",
+ "GET /user/ssh_signing_keys",
+ "GET /user/starred",
+ "GET /user/subscriptions",
+ "GET /user/teams",
+ "GET /users",
+ "GET /users/{username}/events",
+ "GET /users/{username}/events/orgs/{org}",
+ "GET /users/{username}/events/public",
+ "GET /users/{username}/followers",
+ "GET /users/{username}/following",
+ "GET /users/{username}/gists",
+ "GET /users/{username}/gpg_keys",
+ "GET /users/{username}/keys",
+ "GET /users/{username}/orgs",
+ "GET /users/{username}/packages",
+ "GET /users/{username}/projects",
+ "GET /users/{username}/received_events",
+ "GET /users/{username}/received_events/public",
+ "GET /users/{username}/repos",
+ "GET /users/{username}/social_accounts",
+ "GET /users/{username}/ssh_signing_keys",
+ "GET /users/{username}/starred",
+ "GET /users/{username}/subscriptions"
+];
+// pkg/dist-src/paginating-endpoints.js
function isPaginatingEndpoint(arg) {
if (typeof arg === "string") {
return paginatingEndpoints.includes(arg);
@@ -7399,11 +7720,7 @@ function isPaginatingEndpoint(arg) {
}
}
-/**
- * @param octokit Octokit instance
- * @param options Options passed to Octokit constructor
- */
-
+// pkg/dist-src/index.js
function paginateRest(octokit) {
return {
paginate: Object.assign(paginate.bind(null, octokit), {
@@ -7412,202 +7729,384 @@ function paginateRest(octokit) {
};
}
paginateRest.VERSION = VERSION;
-
-exports.composePaginateRest = composePaginateRest;
-exports.isPaginatingEndpoint = isPaginatingEndpoint;
-exports.paginateRest = paginateRest;
-exports.paginatingEndpoints = paginatingEndpoints;
-//# sourceMappingURL=index.js.map
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 3044:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ ((module) => {
"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
-
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
-
- if (enumerableOnly) {
- symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- });
- }
-
- keys.push.apply(keys, symbols);
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- return keys;
-}
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ legacyRestEndpointMethods: () => legacyRestEndpointMethods,
+ restEndpointMethods: () => restEndpointMethods
+});
+module.exports = __toCommonJS(dist_src_exports);
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i] != null ? arguments[i] : {};
+// pkg/dist-src/version.js
+var VERSION = "10.2.0";
- if (i % 2) {
- ownKeys(Object(source), true).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- });
- } else if (Object.getOwnPropertyDescriptors) {
- Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
- } else {
- ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
- }
- }
-
- return target;
-}
-
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
- } else {
- obj[key] = value;
- }
-
- return obj;
-}
-
-const Endpoints = {
+// pkg/dist-src/generated/endpoints.js
+var Endpoints = {
actions: {
- addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
- addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
- cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
- createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
+ addCustomLabelsToSelfHostedRunnerForOrg: [
+ "POST /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ addCustomLabelsToSelfHostedRunnerForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ approveWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
+ ],
+ cancelWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
+ ],
+ createEnvironmentVariable: [
+ "POST /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ createOrUpdateEnvironmentSecret: [
+ "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
- createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ createOrgVariable: ["POST /orgs/{org}/actions/variables"],
+ createRegistrationTokenForOrg: [
+ "POST /orgs/{org}/actions/runners/registration-token"
+ ],
+ createRegistrationTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/registration-token"
+ ],
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
- createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
- createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
- deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"],
- deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"],
- deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
+ createRemoveTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/remove-token"
+ ],
+ createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"],
+ createWorkflowDispatch: [
+ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
+ ],
+ deleteActionsCacheById: [
+ "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
+ ],
+ deleteActionsCacheByKey: [
+ "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
+ ],
+ deleteArtifact: [
+ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
+ ],
+ deleteEnvironmentSecret: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ deleteEnvironmentVariable: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
- deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
+ deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ deleteRepoVariable: [
+ "DELETE /repos/{owner}/{repo}/actions/variables/{name}"
+ ],
+ deleteSelfHostedRunnerFromOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}"
+ ],
+ deleteSelfHostedRunnerFromRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
- deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
- disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
- downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
- downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
- downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"],
- downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
- enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
+ deleteWorkflowRunLogs: [
+ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ disableSelectedRepositoryGithubActionsOrganization: [
+ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ disableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
+ ],
+ downloadArtifact: [
+ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
+ ],
+ downloadJobLogsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
+ ],
+ downloadWorkflowRunAttemptLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
+ ],
+ downloadWorkflowRunLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ enableSelectedRepositoryGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ enableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
+ ],
+ forceCancelWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel"
+ ],
+ generateRunnerJitconfigForOrg: [
+ "POST /orgs/{org}/actions/runners/generate-jitconfig"
+ ],
+ generateRunnerJitconfigForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
+ ],
getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
- getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"],
- getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"],
+ getActionsCacheUsageByRepoForOrg: [
+ "GET /orgs/{org}/actions/cache/usage-by-repository"
+ ],
getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
- getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
- getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
+ getAllowedActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ getAllowedActionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
- getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
- getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"],
- getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"],
- getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"],
- getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
- getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
+ getEnvironmentPublicKey: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"
+ ],
+ getEnvironmentSecret: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ getEnvironmentVariable: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/workflow"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ getGithubActionsPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions"
+ ],
+ getGithubActionsPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions"
+ ],
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
- getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
- getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
- renamed: ["actions", "getGithubActionsPermissionsRepository"]
- }],
+ getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"],
+ getPendingDeploymentsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ getRepoPermissions: [
+ "GET /repos/{owner}/{repo}/actions/permissions",
+ {},
+ { renamed: ["actions", "getGithubActionsPermissionsRepository"] }
+ ],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"],
+ getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"],
+ getReviewsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
+ ],
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
- getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
+ getSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
- getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"],
+ getWorkflowAccessToRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/access"
+ ],
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
- getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
- getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
- getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
+ getWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
+ ],
+ getWorkflowRunUsage: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
+ ],
+ getWorkflowUsage: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
+ ],
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
- listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
- listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
- listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
- listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"],
- listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
+ listEnvironmentSecrets: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets"
+ ],
+ listEnvironmentVariables: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ listJobsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
+ ],
+ listJobsForWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
+ ],
+ listLabelsForSelfHostedRunnerForOrg: [
+ "GET /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ listLabelsForSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
+ listOrgVariables: ["GET /orgs/{org}/actions/variables"],
+ listRepoOrganizationSecrets: [
+ "GET /repos/{owner}/{repo}/actions/organization-secrets"
+ ],
+ listRepoOrganizationVariables: [
+ "GET /repos/{owner}/{repo}/actions/organization-variables"
+ ],
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
+ listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"],
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
- listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
- listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
- listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
+ listRunnerApplicationsForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/downloads"
+ ],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ listSelectedReposForOrgVariable: [
+ "GET /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ listSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/repositories"
+ ],
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
- listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
- listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
+ listWorkflowRunArtifacts: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
+ ],
+ listWorkflowRuns: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
+ ],
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
- reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"],
+ reRunJobForWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
+ ],
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
- reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"],
- removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"],
- removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"],
- removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"],
- removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
- setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
- setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
- setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"],
- setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"],
- setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"],
- setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"],
- setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
- setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
- setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
- setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"],
- setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"]
+ reRunWorkflowFailedJobs: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgVariable: [
+ "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ reviewCustomGatesForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
+ ],
+ reviewPendingDeploymentsForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ setAllowedActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ setAllowedActionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
+ setCustomLabelsForSelfHostedRunnerForOrg: [
+ "PUT /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ setCustomLabelsForSelfHostedRunnerForRepo: [
+ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/workflow"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ setGithubActionsPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions"
+ ],
+ setGithubActionsPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ setSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories"
+ ],
+ setWorkflowAccessToRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/access"
+ ],
+ updateEnvironmentVariable: [
+ "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"],
+ updateRepoVariable: [
+ "PATCH /repos/{owner}/{repo}/actions/variables/{name}"
+ ]
},
activity: {
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
- deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
+ deleteThreadSubscription: [
+ "DELETE /notifications/threads/{thread_id}/subscription"
+ ],
getFeeds: ["GET /feeds"],
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
getThread: ["GET /notifications/threads/{thread_id}"],
- getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
+ getThreadSubscriptionForAuthenticatedUser: [
+ "GET /notifications/threads/{thread_id}/subscription"
+ ],
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
listNotificationsForAuthenticatedUser: ["GET /notifications"],
- listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
+ listOrgEventsForAuthenticatedUser: [
+ "GET /users/{username}/events/orgs/{org}"
+ ],
listPublicEvents: ["GET /events"],
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
listPublicEventsForUser: ["GET /users/{username}/events/public"],
listPublicOrgEvents: ["GET /orgs/{org}/events"],
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
- listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
+ listReceivedPublicEventsForUser: [
+ "GET /users/{username}/received_events/public"
+ ],
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
- listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
+ listRepoNotificationsForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/notifications"
+ ],
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
listReposStarredByUser: ["GET /users/{username}/starred"],
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
@@ -7618,18 +8117,26 @@ const Endpoints = {
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
- setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
+ setThreadSubscription: [
+ "PUT /notifications/threads/{thread_id}/subscription"
+ ],
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
},
apps: {
- addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, {
- renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"]
- }],
- addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
+ addRepoToInstallation: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }
+ ],
+ addRepoToInstallationForAuthenticatedUser: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
checkToken: ["POST /applications/{client_id}/token"],
createFromManifest: ["POST /app-manifests/{code}/conversions"],
- createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
+ createInstallationAccessToken: [
+ "POST /app/installations/{installation_id}/access_tokens"
+ ],
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
deleteToken: ["DELETE /applications/{client_id}/token"],
@@ -7638,75 +8145,132 @@ const Endpoints = {
getInstallation: ["GET /app/installations/{installation_id}"],
getOrgInstallation: ["GET /orgs/{org}/installation"],
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
- getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
- getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
+ getSubscriptionPlanForAccount: [
+ "GET /marketplace_listing/accounts/{account_id}"
+ ],
+ getSubscriptionPlanForAccountStubbed: [
+ "GET /marketplace_listing/stubbed/accounts/{account_id}"
+ ],
getUserInstallation: ["GET /users/{username}/installation"],
getWebhookConfigForApp: ["GET /app/hook/config"],
getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
- listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
- listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
+ listAccountsForPlanStubbed: [
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
+ ],
+ listInstallationReposForAuthenticatedUser: [
+ "GET /user/installations/{installation_id}/repositories"
+ ],
+ listInstallationRequestsForAuthenticatedApp: [
+ "GET /app/installation-requests"
+ ],
listInstallations: ["GET /app/installations"],
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
listPlans: ["GET /marketplace_listing/plans"],
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
listReposAccessibleToInstallation: ["GET /installation/repositories"],
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
- listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
+ listSubscriptionsForAuthenticatedUserStubbed: [
+ "GET /user/marketplace_purchases/stubbed"
+ ],
listWebhookDeliveries: ["GET /app/hook/deliveries"],
- redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"],
- removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, {
- renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"]
- }],
- removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
+ redeliverWebhookDelivery: [
+ "POST /app/hook/deliveries/{delivery_id}/attempts"
+ ],
+ removeRepoFromInstallation: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }
+ ],
+ removeRepoFromInstallationForAuthenticatedUser: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
resetToken: ["PATCH /applications/{client_id}/token"],
revokeInstallationAccessToken: ["DELETE /installation/token"],
scopeToken: ["POST /applications/{client_id}/token/scoped"],
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
- unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
+ unsuspendInstallation: [
+ "DELETE /app/installations/{installation_id}/suspended"
+ ],
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
},
billing: {
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
- getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
- getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"],
- getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"],
+ getGithubActionsBillingUser: [
+ "GET /users/{username}/settings/billing/actions"
+ ],
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
- getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
- getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
- getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
+ getGithubPackagesBillingUser: [
+ "GET /users/{username}/settings/billing/packages"
+ ],
+ getSharedStorageBillingOrg: [
+ "GET /orgs/{org}/settings/billing/shared-storage"
+ ],
+ getSharedStorageBillingUser: [
+ "GET /users/{username}/settings/billing/shared-storage"
+ ]
},
checks: {
create: ["POST /repos/{owner}/{repo}/check-runs"],
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
- listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
+ listAnnotations: [
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
+ ],
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
- listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
+ listForSuite: [
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
+ ],
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
- rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"],
- rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
- setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
+ rerequestRun: [
+ "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
+ ],
+ rerequestSuite: [
+ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
+ ],
+ setSuitesPreferences: [
+ "PATCH /repos/{owner}/{repo}/check-suites/preferences"
+ ],
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
},
codeScanning: {
- deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
- getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
- renamedParameters: {
- alert_id: "alert_number"
- }
- }],
- getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
+ deleteAnalysis: [
+ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
+ ],
+ getAlert: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
+ {},
+ { renamedParameters: { alert_id: "alert_number" } }
+ ],
+ getAnalysis: [
+ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
+ ],
+ getCodeqlDatabase: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
+ ],
+ getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"],
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
- listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
+ listAlertInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
+ ],
listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
- listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
- renamed: ["codeScanning", "listAlertInstances"]
- }],
+ listAlertsInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ {},
+ { renamed: ["codeScanning", "listAlertInstances"] }
+ ],
+ listCodeqlDatabases: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
+ ],
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
+ ],
+ updateDefaultSetup: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
+ ],
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
},
codesOfConduct: {
@@ -7714,82 +8278,190 @@ const Endpoints = {
getConductCode: ["GET /codes_of_conduct/{key}"]
},
codespaces: {
- addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
- codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"],
+ addRepositoryForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ checkPermissionsForDevcontainer: [
+ "GET /repos/{owner}/{repo}/codespaces/permissions_check"
+ ],
+ codespaceMachinesForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/machines"
+ ],
createForAuthenticatedUser: ["POST /user/codespaces"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"],
- createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"],
- createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}"
+ ],
+ createWithPrForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
+ ],
+ createWithRepoForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/codespaces"
+ ],
deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
- deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"],
- exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"],
- getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"],
+ deleteFromOrganization: [
+ "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ deleteSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}"
+ ],
+ exportForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/exports"
+ ],
+ getCodespacesForUserInOrg: [
+ "GET /orgs/{org}/members/{username}/codespaces"
+ ],
+ getExportDetailsForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/exports/{export_id}"
+ ],
getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
- getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"],
- listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"],
+ getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"],
+ getPublicKeyForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/public-key"
+ ],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ getSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}"
+ ],
+ listDevcontainersInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers"
+ ],
listForAuthenticatedUser: ["GET /user/codespaces"],
- listInOrganization: ["GET /orgs/{org}/codespaces", {}, {
- renamedParameters: {
- org_id: "org"
- }
- }],
- listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"],
+ listInOrganization: [
+ "GET /orgs/{org}/codespaces",
+ {},
+ { renamedParameters: { org_id: "org" } }
+ ],
+ listInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces"
+ ],
+ listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
- listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"],
+ listRepositoriesForSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}/repositories"
+ ],
listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
- removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
- repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"],
- setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
+ preFlightWithRepoForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/new"
+ ],
+ publishForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/publish"
+ ],
+ removeRepositoryForSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ repoMachinesForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/machines"
+ ],
+ setRepositoriesForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
- stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"],
+ stopInOrganization: [
+ "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
+ ],
updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
},
+ copilot: {
+ addCopilotForBusinessSeatsForTeams: [
+ "POST /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ addCopilotForBusinessSeatsForUsers: [
+ "POST /orgs/{org}/copilot/billing/selected_users"
+ ],
+ cancelCopilotSeatAssignmentForTeams: [
+ "DELETE /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ cancelCopilotSeatAssignmentForUsers: [
+ "DELETE /orgs/{org}/copilot/billing/selected_users"
+ ],
+ getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"],
+ getCopilotSeatDetailsForUser: [
+ "GET /orgs/{org}/members/{username}/copilot"
+ ],
+ listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"]
+ },
dependabot: {
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
- createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"],
getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/dependabot/alerts"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"],
listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
- listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"],
- removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
- setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
+ ]
},
dependencyGraph: {
- createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"],
- diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]
- },
- emojis: {
- get: ["GET /emojis"]
- },
- enterpriseAdmin: {
- addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
- enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
- getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
- getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
- getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"],
- listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
- removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"],
- setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
- setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
- setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
+ createRepositorySnapshot: [
+ "POST /repos/{owner}/{repo}/dependency-graph/snapshots"
+ ],
+ diffRange: [
+ "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
+ ],
+ exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"]
},
+ emojis: { get: ["GET /emojis"] },
gists: {
checkIsStarred: ["GET /gists/{gist_id}/star"],
create: ["POST /gists"],
@@ -7835,33 +8507,52 @@ const Endpoints = {
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
- getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
- renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
- }],
+ getRestrictionsForYourPublicRepos: [
+ "GET /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }
+ ],
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
- removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
- removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
- renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
- }],
+ removeRestrictionsForRepo: [
+ "DELETE /repos/{owner}/{repo}/interaction-limits"
+ ],
+ removeRestrictionsForYourPublicRepos: [
+ "DELETE /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }
+ ],
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
- setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
- renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
- }]
+ setRestrictionsForYourPublicRepos: [
+ "PUT /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }
+ ]
},
issues: {
- addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
+ addAssignees: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
+ checkUserCanBeAssignedToIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
+ ],
create: ["POST /repos/{owner}/{repo}/issues"],
- createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+ createComment: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
+ ],
createLabel: ["POST /repos/{owner}/{repo}/labels"],
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
- deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ deleteComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
+ ],
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
- deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
+ deleteMilestone: [
+ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ],
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
@@ -7873,24 +8564,38 @@ const Endpoints = {
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
- listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"],
+ listEventsForTimeline: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
+ ],
listForAuthenticatedUser: ["GET /user/issues"],
listForOrg: ["GET /orgs/{org}/issues"],
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
- listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
+ listLabelsForMilestone: [
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
+ ],
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
- listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ listLabelsOnIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
- removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
- removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
+ removeAllLabels: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
+ removeAssignees: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
+ removeLabel: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
+ ],
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
- updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
+ updateMilestone: [
+ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ]
},
licenses: {
get: ["GET /licenses/{license}"],
@@ -7899,65 +8604,151 @@ const Endpoints = {
},
markdown: {
render: ["POST /markdown"],
- renderRaw: ["POST /markdown/raw", {
- headers: {
- "content-type": "text/plain; charset=utf-8"
- }
- }]
+ renderRaw: [
+ "POST /markdown/raw",
+ { headers: { "content-type": "text/plain; charset=utf-8" } }
+ ]
},
meta: {
get: ["GET /meta"],
+ getAllVersions: ["GET /versions"],
getOctocat: ["GET /octocat"],
getZen: ["GET /zen"],
root: ["GET /"]
},
migrations: {
- cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
- deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"],
- deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"],
- downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"],
- getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"],
- getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
- getImportStatus: ["GET /repos/{owner}/{repo}/import"],
- getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
+ cancelImport: [
+ "DELETE /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import"
+ }
+ ],
+ deleteArchiveForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/archive"
+ ],
+ deleteArchiveForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ downloadArchiveForOrg: [
+ "GET /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ getArchiveForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/archive"
+ ],
+ getCommitAuthors: [
+ "GET /repos/{owner}/{repo}/import/authors",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors"
+ }
+ ],
+ getImportStatus: [
+ "GET /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status"
+ }
+ ],
+ getLargeFiles: [
+ "GET /repos/{owner}/{repo}/import/large_files",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files"
+ }
+ ],
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
listForAuthenticatedUser: ["GET /user/migrations"],
listForOrg: ["GET /orgs/{org}/migrations"],
- listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"],
+ listReposForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/repositories"
+ ],
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
- listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, {
- renamed: ["migrations", "listReposForAuthenticatedUser"]
- }],
- mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
- setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
+ listReposForUser: [
+ "GET /user/migrations/{migration_id}/repositories",
+ {},
+ { renamed: ["migrations", "listReposForAuthenticatedUser"] }
+ ],
+ mapCommitAuthor: [
+ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author"
+ }
+ ],
+ setLfsPreference: [
+ "PATCH /repos/{owner}/{repo}/import/lfs",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference"
+ }
+ ],
startForAuthenticatedUser: ["POST /user/migrations"],
startForOrg: ["POST /orgs/{org}/migrations"],
- startImport: ["PUT /repos/{owner}/{repo}/import"],
- unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"],
- unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"],
- updateImport: ["PATCH /repos/{owner}/{repo}/import"]
+ startImport: [
+ "PUT /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import"
+ }
+ ],
+ unlockRepoForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ unlockRepoForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ updateImport: [
+ "PATCH /repos/{owner}/{repo}/import",
+ {},
+ {
+ deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import"
+ }
+ ]
},
orgs: {
+ addSecurityManagerTeam: [
+ "PUT /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
- convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
+ convertMemberToOutsideCollaborator: [
+ "PUT /orgs/{org}/outside_collaborators/{username}"
+ ],
createInvitation: ["POST /orgs/{org}/invitations"],
+ createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"],
+ createOrUpdateCustomPropertiesValuesForRepos: [
+ "PATCH /orgs/{org}/properties/values"
+ ],
+ createOrUpdateCustomProperty: [
+ "PUT /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
createWebhook: ["POST /orgs/{org}/hooks"],
+ delete: ["DELETE /orgs/{org}"],
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
+ enableOrDisableSecurityProductOnAllOrgRepos: [
+ "POST /orgs/{org}/{security_product}/{enablement}"
+ ],
get: ["GET /orgs/{org}"],
+ getAllCustomProperties: ["GET /orgs/{org}/properties/schema"],
+ getCustomProperty: [
+ "GET /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
- getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"],
+ getWebhookDelivery: [
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
list: ["GET /organizations"],
listAppInstallations: ["GET /orgs/{org}/installations"],
listBlockedUsers: ["GET /orgs/{org}/blocks"],
- listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"],
+ listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"],
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
listForAuthenticatedUser: ["GET /user/orgs"],
listForUser: ["GET /users/{username}/orgs"],
@@ -7965,55 +8756,148 @@ const Endpoints = {
listMembers: ["GET /orgs/{org}/members"],
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
+ listPatGrantRepositories: [
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
+ ],
+ listPatGrantRequestRepositories: [
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
+ ],
+ listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"],
+ listPatGrants: ["GET /orgs/{org}/personal-access-tokens"],
listPendingInvitations: ["GET /orgs/{org}/invitations"],
listPublicMembers: ["GET /orgs/{org}/public_members"],
+ listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"],
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
listWebhooks: ["GET /orgs/{org}/hooks"],
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
- redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
+ redeliverWebhookDelivery: [
+ "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeCustomProperty: [
+ "DELETE /orgs/{org}/properties/schema/{custom_property_name}"
+ ],
removeMember: ["DELETE /orgs/{org}/members/{username}"],
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
- removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
- removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
+ removeOutsideCollaborator: [
+ "DELETE /orgs/{org}/outside_collaborators/{username}"
+ ],
+ removePublicMembershipForAuthenticatedUser: [
+ "DELETE /orgs/{org}/public_members/{username}"
+ ],
+ removeSecurityManagerTeam: [
+ "DELETE /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
+ reviewPatGrantRequest: [
+ "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
+ ],
+ reviewPatGrantRequestsInBulk: [
+ "POST /orgs/{org}/personal-access-token-requests"
+ ],
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
- setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
+ setPublicMembershipForAuthenticatedUser: [
+ "PUT /orgs/{org}/public_members/{username}"
+ ],
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
update: ["PATCH /orgs/{org}"],
- updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
+ updateMembershipForAuthenticatedUser: [
+ "PATCH /user/memberships/orgs/{org}"
+ ],
+ updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"],
+ updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"],
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
},
packages: {
- deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
- deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
- deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"],
- deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, {
- renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"]
- }],
- getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, {
- renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]
- }],
- getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
- getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
- getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
- getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
- getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
- getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
- getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
+ deletePackageForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageVersionForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getAllPackageVersionsForAPackageOwnedByAnOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ {},
+ { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }
+ ],
+ getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ {},
+ {
+ renamed: [
+ "packages",
+ "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
+ ]
+ }
+ ],
+ getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions"
+ ],
+ getPackageForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}"
+ ],
+ getPackageForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ getPackageForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ getPackageVersionForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ listDockerMigrationConflictingPackagesForAuthenticatedUser: [
+ "GET /user/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForOrganization: [
+ "GET /orgs/{org}/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForUser: [
+ "GET /users/{username}/docker/conflicts"
+ ],
listPackagesForAuthenticatedUser: ["GET /user/packages"],
listPackagesForOrganization: ["GET /orgs/{org}/packages"],
listPackagesForUser: ["GET /users/{username}/packages"],
- restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
- restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
- restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
+ restorePackageForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageVersionForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ]
},
projects: {
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
@@ -8028,7 +8912,9 @@ const Endpoints = {
get: ["GET /projects/{project_id}"],
getCard: ["GET /projects/columns/cards/{card_id}"],
getColumn: ["GET /projects/columns/{column_id}"],
- getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"],
+ getPermissionForUser: [
+ "GET /projects/{project_id}/collaborators/{username}/permission"
+ ],
listCards: ["GET /projects/columns/{column_id}/cards"],
listCollaborators: ["GET /projects/{project_id}/collaborators"],
listColumns: ["GET /projects/{project_id}/columns"],
@@ -8037,7 +8923,9 @@ const Endpoints = {
listForUser: ["GET /users/{username}/projects"],
moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
moveColumn: ["POST /projects/columns/{column_id}/moves"],
- removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"],
+ removeCollaborator: [
+ "DELETE /projects/{project_id}/collaborators/{username}"
+ ],
update: ["PATCH /projects/{project_id}"],
updateCard: ["PATCH /projects/columns/cards/{card_id}"],
updateColumn: ["PATCH /projects/columns/{column_id}"]
@@ -8045,191 +8933,411 @@ const Endpoints = {
pulls: {
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
create: ["POST /repos/{owner}/{repo}/pulls"],
- createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
+ createReplyForReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
+ ],
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
- createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
- deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
- deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
- dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
+ createReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
+ deletePendingReview: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ deleteReviewComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ],
+ dismissReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
+ ],
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
- getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
+ getReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
list: ["GET /repos/{owner}/{repo}/pulls"],
- listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
+ listCommentsForReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
+ ],
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
- listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
+ listRequestedReviewers: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ listReviewComments: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
- removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
+ removeRequestedReviewers: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ requestReviewers: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ submitReview: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
+ ],
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
- updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"],
- updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
- updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
- },
- rateLimit: {
- get: ["GET /rate_limit"]
+ updateBranch: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
+ ],
+ updateReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ updateReviewComment: [
+ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ]
},
+ rateLimit: { get: ["GET /rate_limit"] },
reactions: {
- createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
- createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
- createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
- createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
- createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"],
- createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
- createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"],
- deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
- deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"],
- deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
- deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
- listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
+ createForCommitComment: [
+ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
+ createForIssue: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
+ ],
+ createForIssueComment: [
+ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ createForPullRequestReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ createForRelease: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ createForTeamDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ createForTeamDiscussionInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ],
+ deleteForCommitComment: [
+ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForIssue: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
+ ],
+ deleteForIssueComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForPullRequestComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForRelease: [
+ "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussion: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussionComment: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
+ ],
+ listForCommitComment: [
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
- listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
- listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
- listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"],
- listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
- listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
+ listForIssueComment: [
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ listForPullRequestReviewComment: [
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ listForRelease: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ listForTeamDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ listForTeamDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ]
},
repos: {
- acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, {
- renamed: ["repos", "acceptInvitationForAuthenticatedUser"]
- }],
- acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"],
- addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
+ acceptInvitation: [
+ "PATCH /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }
+ ],
+ acceptInvitationForAuthenticatedUser: [
+ "PATCH /user/repository_invitations/{invitation_id}"
+ ],
+ addAppAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
- addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
+ addStatusCheckContexts: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ addTeamAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ addUserAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ checkAutomatedSecurityFixes: [
+ "GET /repos/{owner}/{repo}/automated-security-fixes"
+ ],
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
- checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
+ checkVulnerabilityAlerts: [
+ "GET /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
- compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
+ compareCommitsWithBasehead: [
+ "GET /repos/{owner}/{repo}/compare/{basehead}"
+ ],
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
- createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
- createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
+ createCommitComment: [
+ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
+ createCommitSignatureProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
- createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
+ createDeploymentBranchPolicy: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ createDeploymentProtectionRule: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
+ createDeploymentStatus: [
+ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
createForAuthenticatedUser: ["POST /user/repos"],
createFork: ["POST /repos/{owner}/{repo}/forks"],
createInOrg: ["POST /orgs/{org}/repos"],
- createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"],
+ createOrUpdateEnvironment: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
+ createOrgRuleset: ["POST /orgs/{org}/rulesets"],
+ createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"],
createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
createRelease: ["POST /repos/{owner}/{repo}/releases"],
+ createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"],
createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
- createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
+ createUsingTemplate: [
+ "POST /repos/{template_owner}/{template_repo}/generate"
+ ],
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
- declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
- renamed: ["repos", "declineInvitationForAuthenticatedUser"]
- }],
- declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"],
+ declineInvitation: [
+ "DELETE /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }
+ ],
+ declineInvitationForAuthenticatedUser: [
+ "DELETE /user/repository_invitations/{invitation_id}"
+ ],
delete: ["DELETE /repos/{owner}/{repo}"],
- deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
- deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"],
+ deleteAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ deleteAdminBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ deleteAnEnvironment: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
- deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
+ deleteBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
- deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
+ deleteCommitSignatureProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
- deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
+ deleteDeployment: [
+ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
+ ],
+ deleteDeploymentBranchPolicy: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
- deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
+ deleteInvitation: [
+ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"],
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
- deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
+ deletePullRequestReviewProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
- deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"],
+ deleteReleaseAsset: [
+ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ deleteTagProtection: [
+ "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
+ ],
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
- disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
- disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
- disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"],
- downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
- renamed: ["repos", "downloadZipballArchive"]
- }],
+ disableAutomatedSecurityFixes: [
+ "DELETE /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ disableDeploymentProtectionRule: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ disablePrivateVulnerabilityReporting: [
+ "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ disableVulnerabilityAlerts: [
+ "DELETE /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ downloadArchive: [
+ "GET /repos/{owner}/{repo}/zipball/{ref}",
+ {},
+ { renamed: ["repos", "downloadZipballArchive"] }
+ ],
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
- enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"],
- enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"],
- enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"],
- generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"],
+ enableAutomatedSecurityFixes: [
+ "PUT /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ enablePrivateVulnerabilityReporting: [
+ "PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ enableVulnerabilityAlerts: [
+ "PUT /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ generateReleaseNotes: [
+ "POST /repos/{owner}/{repo}/releases/generate-notes"
+ ],
get: ["GET /repos/{owner}/{repo}"],
- getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
- getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
+ getAccessRestrictions: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ getAdminBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ getAllDeploymentProtectionRules: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
- getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
+ getAllStatusCheckContexts: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
+ ],
getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
- getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
+ getAppsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
+ ],
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
- getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
+ getBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"],
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
- getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
+ getCollaboratorPermissionLevel: [
+ "GET /repos/{owner}/{repo}/collaborators/{username}/permission"
+ ],
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
- getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
+ getCommitSignatureProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
+ getCustomDeploymentProtectionRule: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"],
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
- getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
- getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"],
+ getDeploymentBranchPolicy: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ getDeploymentStatus: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
+ ],
+ getEnvironment: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
+ getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"],
+ getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"],
+ getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"],
+ getOrgRulesets: ["GET /orgs/{org}/rulesets"],
getPages: ["GET /repos/{owner}/{repo}/pages"],
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
- getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
+ getPullRequestReviewProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
getReadme: ["GET /repos/{owner}/{repo}/readme"],
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
- getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
+ getRepoRuleSuite: [
+ "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}"
+ ],
+ getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"],
+ getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"],
+ getStatusChecksProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ getTeamsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
+ ],
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
- getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
+ getUsersWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
+ ],
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
- getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
- getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"],
+ getWebhookConfigForRepo: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ getWebhookDelivery: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
+ listActivities: ["GET /repos/{owner}/{repo}/activity"],
listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
listBranches: ["GET /repos/{owner}/{repo}/branches"],
- listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"],
+ listBranchesForHeadCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
+ ],
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
- listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
+ listCommentsForCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
- listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
+ listCommitStatusesForRef: [
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses"
+ ],
listCommits: ["GET /repos/{owner}/{repo}/commits"],
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
+ listCustomDeploymentRuleIntegrations: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
+ ],
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
- listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
+ listDeploymentBranchPolicies: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ listDeploymentStatuses: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
listForAuthenticatedUser: ["GET /user/repos"],
listForOrg: ["GET /orgs/{org}/repos"],
@@ -8240,67 +9348,117 @@ const Endpoints = {
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
listPublic: ["GET /repositories"],
- listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
- listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
+ listPullRequestsAssociatedWithCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
+ ],
+ listReleaseAssets: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets"
+ ],
listReleases: ["GET /repos/{owner}/{repo}/releases"],
listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
listTags: ["GET /repos/{owner}/{repo}/tags"],
listTeams: ["GET /repos/{owner}/{repo}/teams"],
- listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
+ listWebhookDeliveries: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
+ ],
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
merge: ["POST /repos/{owner}/{repo}/merges"],
mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
- redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
- removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
- removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
- removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
+ redeliverWebhookDelivery: [
+ "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeAppAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ removeCollaborator: [
+ "DELETE /repos/{owner}/{repo}/collaborators/{username}"
+ ],
+ removeStatusCheckContexts: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ removeStatusCheckProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ removeTeamAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ removeUserAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
- setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
- setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
+ setAdminBranchProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ setAppAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ setStatusCheckContexts: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ setTeamAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ setUserAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
transfer: ["POST /repos/{owner}/{repo}/transfer"],
update: ["PATCH /repos/{owner}/{repo}"],
- updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
+ updateBranchProtection: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
+ updateDeploymentBranchPolicy: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
- updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
- updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
+ updateInvitation: [
+ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"],
+ updatePullRequestReviewProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
- updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
- renamed: ["repos", "updateStatusCheckProtection"]
- }],
- updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
+ updateReleaseAsset: [
+ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ updateStatusCheckPotection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
+ {},
+ { renamed: ["repos", "updateStatusCheckProtection"] }
+ ],
+ updateStatusCheckProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
- updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
- uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
- baseUrl: "https://uploads.github.com"
- }]
+ updateWebhookConfigForRepo: [
+ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ uploadReleaseAsset: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}",
+ { baseUrl: "https://uploads.github.com" }
+ ]
},
search: {
code: ["GET /search/code"],
@@ -8312,390 +9470,596 @@ const Endpoints = {
users: ["GET /search/users"]
},
secretScanning: {
- getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
- listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
+ getAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/secret-scanning/alerts"
+ ],
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
- listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
+ listLocationsForAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ]
+ },
+ securityAdvisories: {
+ createPrivateVulnerabilityReport: [
+ "POST /repos/{owner}/{repo}/security-advisories/reports"
+ ],
+ createRepositoryAdvisory: [
+ "POST /repos/{owner}/{repo}/security-advisories"
+ ],
+ createRepositoryAdvisoryCveRequest: [
+ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
+ ],
+ getGlobalAdvisory: ["GET /advisories/{ghsa_id}"],
+ getRepositoryAdvisory: [
+ "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ],
+ listGlobalAdvisories: ["GET /advisories"],
+ listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"],
+ listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"],
+ updateRepositoryAdvisory: [
+ "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ]
},
teams: {
- addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
- addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
- checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
+ addOrUpdateMembershipForUserInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ addOrUpdateProjectPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ addOrUpdateRepoPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ checkPermissionsForProjectInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ checkPermissionsForRepoInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
create: ["POST /orgs/{org}/teams"],
- createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
+ createDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
- deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
+ deleteDiscussionCommentInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ deleteDiscussionInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
- getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
- getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
+ getDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ getDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ getMembershipForUserInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
list: ["GET /orgs/{org}/teams"],
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
- listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
+ listDiscussionCommentsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
listForAuthenticatedUser: ["GET /user/teams"],
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
- listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
+ listPendingInvitationsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/invitations"
+ ],
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
- removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
- removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
- updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
+ removeMembershipForUserInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ removeProjectInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ removeRepoInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ updateDiscussionCommentInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ updateDiscussionInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
},
users: {
- addEmailForAuthenticated: ["POST /user/emails", {}, {
- renamed: ["users", "addEmailForAuthenticatedUser"]
- }],
+ addEmailForAuthenticated: [
+ "POST /user/emails",
+ {},
+ { renamed: ["users", "addEmailForAuthenticatedUser"] }
+ ],
addEmailForAuthenticatedUser: ["POST /user/emails"],
+ addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"],
block: ["PUT /user/blocks/{username}"],
checkBlocked: ["GET /user/blocks/{username}"],
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
- createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, {
- renamed: ["users", "createGpgKeyForAuthenticatedUser"]
- }],
+ createGpgKeyForAuthenticated: [
+ "POST /user/gpg_keys",
+ {},
+ { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }
+ ],
createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
- createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, {
- renamed: ["users", "createPublicSshKeyForAuthenticatedUser"]
- }],
+ createPublicSshKeyForAuthenticated: [
+ "POST /user/keys",
+ {},
+ { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }
+ ],
createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
- deleteEmailForAuthenticated: ["DELETE /user/emails", {}, {
- renamed: ["users", "deleteEmailForAuthenticatedUser"]
- }],
+ createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"],
+ deleteEmailForAuthenticated: [
+ "DELETE /user/emails",
+ {},
+ { renamed: ["users", "deleteEmailForAuthenticatedUser"] }
+ ],
deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
- deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
- renamed: ["users", "deleteGpgKeyForAuthenticatedUser"]
- }],
+ deleteGpgKeyForAuthenticated: [
+ "DELETE /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }
+ ],
deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
- deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, {
- renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"]
- }],
+ deletePublicSshKeyForAuthenticated: [
+ "DELETE /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }
+ ],
deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
+ deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"],
+ deleteSshSigningKeyForAuthenticatedUser: [
+ "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
follow: ["PUT /user/following/{username}"],
getAuthenticated: ["GET /user"],
getByUsername: ["GET /users/{username}"],
getContextForUser: ["GET /users/{username}/hovercard"],
- getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
- renamed: ["users", "getGpgKeyForAuthenticatedUser"]
- }],
+ getGpgKeyForAuthenticated: [
+ "GET /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }
+ ],
getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
- getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, {
- renamed: ["users", "getPublicSshKeyForAuthenticatedUser"]
- }],
+ getPublicSshKeyForAuthenticated: [
+ "GET /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }
+ ],
getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
+ getSshSigningKeyForAuthenticatedUser: [
+ "GET /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
list: ["GET /users"],
- listBlockedByAuthenticated: ["GET /user/blocks", {}, {
- renamed: ["users", "listBlockedByAuthenticatedUser"]
- }],
+ listBlockedByAuthenticated: [
+ "GET /user/blocks",
+ {},
+ { renamed: ["users", "listBlockedByAuthenticatedUser"] }
+ ],
listBlockedByAuthenticatedUser: ["GET /user/blocks"],
- listEmailsForAuthenticated: ["GET /user/emails", {}, {
- renamed: ["users", "listEmailsForAuthenticatedUser"]
- }],
+ listEmailsForAuthenticated: [
+ "GET /user/emails",
+ {},
+ { renamed: ["users", "listEmailsForAuthenticatedUser"] }
+ ],
listEmailsForAuthenticatedUser: ["GET /user/emails"],
- listFollowedByAuthenticated: ["GET /user/following", {}, {
- renamed: ["users", "listFollowedByAuthenticatedUser"]
- }],
+ listFollowedByAuthenticated: [
+ "GET /user/following",
+ {},
+ { renamed: ["users", "listFollowedByAuthenticatedUser"] }
+ ],
listFollowedByAuthenticatedUser: ["GET /user/following"],
listFollowersForAuthenticatedUser: ["GET /user/followers"],
listFollowersForUser: ["GET /users/{username}/followers"],
listFollowingForUser: ["GET /users/{username}/following"],
- listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, {
- renamed: ["users", "listGpgKeysForAuthenticatedUser"]
- }],
+ listGpgKeysForAuthenticated: [
+ "GET /user/gpg_keys",
+ {},
+ { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }
+ ],
listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
- listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, {
- renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
- }],
+ listPublicEmailsForAuthenticated: [
+ "GET /user/public_emails",
+ {},
+ { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }
+ ],
listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
listPublicKeysForUser: ["GET /users/{username}/keys"],
- listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, {
- renamed: ["users", "listPublicSshKeysForAuthenticatedUser"]
- }],
+ listPublicSshKeysForAuthenticated: [
+ "GET /user/keys",
+ {},
+ { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }
+ ],
listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
- setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, {
- renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"]
- }],
- setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"],
+ listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"],
+ listSocialAccountsForUser: ["GET /users/{username}/social_accounts"],
+ listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"],
+ listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"],
+ setPrimaryEmailVisibilityForAuthenticated: [
+ "PATCH /user/email/visibility",
+ {},
+ { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }
+ ],
+ setPrimaryEmailVisibilityForAuthenticatedUser: [
+ "PATCH /user/email/visibility"
+ ],
unblock: ["DELETE /user/blocks/{username}"],
unfollow: ["DELETE /user/following/{username}"],
updateAuthenticated: ["PATCH /user"]
}
};
+var endpoints_default = Endpoints;
-const VERSION = "5.16.2";
-
-function endpointsToMethods(octokit, endpointsMap) {
- const newMethods = {};
-
- for (const [scope, endpoints] of Object.entries(endpointsMap)) {
- for (const [methodName, endpoint] of Object.entries(endpoints)) {
- const [route, defaults, decorations] = endpoint;
- const [method, url] = route.split(/ /);
- const endpointDefaults = Object.assign({
+// pkg/dist-src/endpoints-to-methods.js
+var endpointMethodsMap = /* @__PURE__ */ new Map();
+for (const [scope, endpoints] of Object.entries(endpoints_default)) {
+ for (const [methodName, endpoint] of Object.entries(endpoints)) {
+ const [route, defaults, decorations] = endpoint;
+ const [method, url] = route.split(/ /);
+ const endpointDefaults = Object.assign(
+ {
method,
url
- }, defaults);
-
- if (!newMethods[scope]) {
- newMethods[scope] = {};
- }
-
- const scopeMethods = newMethods[scope];
-
- if (decorations) {
- scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
- continue;
- }
-
- scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
+ },
+ defaults
+ );
+ if (!endpointMethodsMap.has(scope)) {
+ endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());
}
+ endpointMethodsMap.get(scope).set(methodName, {
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ });
+ }
+}
+var handler = {
+ has({ scope }, methodName) {
+ return endpointMethodsMap.get(scope).has(methodName);
+ },
+ getOwnPropertyDescriptor(target, methodName) {
+ return {
+ value: this.get(target, methodName),
+ // ensures method is in the cache
+ configurable: true,
+ writable: true,
+ enumerable: true
+ };
+ },
+ defineProperty(target, methodName, descriptor) {
+ Object.defineProperty(target.cache, methodName, descriptor);
+ return true;
+ },
+ deleteProperty(target, methodName) {
+ delete target.cache[methodName];
+ return true;
+ },
+ ownKeys({ scope }) {
+ return [...endpointMethodsMap.get(scope).keys()];
+ },
+ set(target, methodName, value) {
+ return target.cache[methodName] = value;
+ },
+ get({ octokit, scope, cache }, methodName) {
+ if (cache[methodName]) {
+ return cache[methodName];
+ }
+ const method = endpointMethodsMap.get(scope).get(methodName);
+ if (!method) {
+ return void 0;
+ }
+ const { endpointDefaults, decorations } = method;
+ if (decorations) {
+ cache[methodName] = decorate(
+ octokit,
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ );
+ } else {
+ cache[methodName] = octokit.request.defaults(endpointDefaults);
+ }
+ return cache[methodName];
+ }
+};
+function endpointsToMethods(octokit) {
+ const newMethods = {};
+ for (const scope of endpointMethodsMap.keys()) {
+ newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);
}
-
return newMethods;
}
-
function decorate(octokit, scope, methodName, defaults, decorations) {
const requestWithDefaults = octokit.request.defaults(defaults);
- /* istanbul ignore next */
-
function withDecorations(...args) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
-
+ let options = requestWithDefaults.endpoint.merge(...args);
if (decorations.mapToData) {
options = Object.assign({}, options, {
data: options[decorations.mapToData],
- [decorations.mapToData]: undefined
+ [decorations.mapToData]: void 0
});
return requestWithDefaults(options);
}
-
if (decorations.renamed) {
const [newScope, newMethodName] = decorations.renamed;
- octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
+ octokit.log.warn(
+ `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
+ );
}
-
if (decorations.deprecated) {
octokit.log.warn(decorations.deprecated);
}
-
if (decorations.renamedParameters) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- const options = requestWithDefaults.endpoint.merge(...args);
-
- for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
- if (name in options) {
- octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
-
- if (!(alias in options)) {
- options[alias] = options[name];
+ const options2 = requestWithDefaults.endpoint.merge(...args);
+ for (const [name, alias] of Object.entries(
+ decorations.renamedParameters
+ )) {
+ if (name in options2) {
+ octokit.log.warn(
+ `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
+ );
+ if (!(alias in options2)) {
+ options2[alias] = options2[name];
}
-
- delete options[name];
+ delete options2[name];
}
}
-
- return requestWithDefaults(options);
- } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
-
-
+ return requestWithDefaults(options2);
+ }
return requestWithDefaults(...args);
}
-
return Object.assign(withDecorations, requestWithDefaults);
}
+// pkg/dist-src/index.js
function restEndpointMethods(octokit) {
- const api = endpointsToMethods(octokit, Endpoints);
+ const api = endpointsToMethods(octokit);
return {
rest: api
};
}
restEndpointMethods.VERSION = VERSION;
function legacyRestEndpointMethods(octokit) {
- const api = endpointsToMethods(octokit, Endpoints);
- return _objectSpread2(_objectSpread2({}, api), {}, {
+ const api = endpointsToMethods(octokit);
+ return {
+ ...api,
rest: api
- });
+ };
}
legacyRestEndpointMethods.VERSION = VERSION;
-
-exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
-exports.restEndpointMethods = restEndpointMethods;
-//# sourceMappingURL=index.js.map
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 537:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-
-var deprecation = __nccwpck_require__(8932);
-var once = _interopDefault(__nccwpck_require__(1223));
-
-const logOnceCode = once(deprecation => console.warn(deprecation));
-const logOnceHeaders = once(deprecation => console.warn(deprecation));
-/**
- * Error with extra properties to help with debugging
- */
-
-class RequestError extends Error {
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ RequestError: () => RequestError
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_deprecation = __nccwpck_require__(8932);
+var import_once = __toESM(__nccwpck_require__(1223));
+var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
+var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
+var RequestError = class extends Error {
constructor(message, statusCode, options) {
- super(message); // Maintains proper stack trace (only available on V8)
-
- /* istanbul ignore next */
-
+ super(message);
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
-
this.name = "HttpError";
this.status = statusCode;
let headers;
-
if ("headers" in options && typeof options.headers !== "undefined") {
headers = options.headers;
}
-
if ("response" in options) {
this.response = options.response;
headers = options.response.headers;
- } // redact request credentials without mutating original request options
-
-
+ }
const requestCopy = Object.assign({}, options.request);
-
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
- authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ authorization: options.request.headers.authorization.replace(
+ / .*$/,
+ " [REDACTED]"
+ )
});
}
-
- requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
- // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
- .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
- // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
- .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
- this.request = requestCopy; // deprecations
-
+ requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
Object.defineProperty(this, "code", {
get() {
- logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ logOnceCode(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.code` is deprecated, use `error.status`."
+ )
+ );
return statusCode;
}
-
});
Object.defineProperty(this, "headers", {
get() {
- logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
+ logOnceHeaders(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
+ )
+ );
return headers || {};
}
-
});
}
-
-}
-
-exports.RequestError = RequestError;
-//# sourceMappingURL=index.js.map
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
/***/ 6234:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ request: () => request
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_endpoint = __nccwpck_require__(9440);
+var import_universal_user_agent = __nccwpck_require__(5030);
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+// pkg/dist-src/version.js
+var VERSION = "8.1.6";
-var endpoint = __nccwpck_require__(9440);
-var universalUserAgent = __nccwpck_require__(5030);
-var isPlainObject = __nccwpck_require__(3287);
-var nodeFetch = _interopDefault(__nccwpck_require__(467));
-var requestError = __nccwpck_require__(537);
+// pkg/dist-src/is-plain-object.js
+function isPlainObject(value) {
+ if (typeof value !== "object" || value === null)
+ return false;
+ if (Object.prototype.toString.call(value) !== "[object Object]")
+ return false;
+ const proto = Object.getPrototypeOf(value);
+ if (proto === null)
+ return true;
+ const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
+ return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
-const VERSION = "5.6.3";
+// pkg/dist-src/fetch-wrapper.js
+var import_request_error = __nccwpck_require__(537);
+// pkg/dist-src/get-buffer-response.js
function getBufferResponse(response) {
return response.arrayBuffer();
}
+// pkg/dist-src/fetch-wrapper.js
function fetchWrapper(requestOptions) {
+ var _a, _b, _c;
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
-
- if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;
+ if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
-
let headers = {};
let status;
let url;
- const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
- return fetch(requestOptions.url, Object.assign({
+ let { fetch } = globalThis;
+ if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {
+ fetch = requestOptions.request.fetch;
+ }
+ if (!fetch) {
+ throw new Error(
+ "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
+ );
+ }
+ return fetch(requestOptions.url, {
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
- redirect: requestOptions.redirect
- }, // `requestOptions.request.agent` type is incompatible
- // see https://github.com/octokit/types.ts/pull/264
- requestOptions.request)).then(async response => {
+ signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,
+ // duplex must be set if request.body is ReadableStream or Async Iterables.
+ // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
+ ...requestOptions.body && { duplex: "half" }
+ }).then(async (response) => {
url = response.url;
status = response.status;
-
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
-
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
- log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
+ log.warn(
+ `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
+ );
}
-
if (status === 204 || status === 205) {
return;
- } // GitHub API returns 200 for HEAD requests
-
-
+ }
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
-
- throw new requestError.RequestError(response.statusText, status, {
+ throw new import_request_error.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
- data: undefined
+ data: void 0
},
request: requestOptions
});
}
-
if (status === 304) {
- throw new requestError.RequestError("Not modified", status, {
+ throw new import_request_error.RequestError("Not modified", status, {
response: {
url,
status,
@@ -8705,10 +10069,9 @@ function fetchWrapper(requestOptions) {
request: requestOptions
});
}
-
if (status >= 400) {
const data = await getResponseData(response);
- const error = new requestError.RequestError(toErrorMessage(data), status, {
+ const error = new import_request_error.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
@@ -8719,87 +10082,87 @@ function fetchWrapper(requestOptions) {
});
throw error;
}
-
- return getResponseData(response);
- }).then(data => {
+ return parseSuccessResponseBody ? await getResponseData(response) : response.body;
+ }).then((data) => {
return {
status,
url,
headers,
data
};
- }).catch(error => {
- if (error instanceof requestError.RequestError) throw error;
- throw new requestError.RequestError(error.message, 500, {
+ }).catch((error) => {
+ if (error instanceof import_request_error.RequestError)
+ throw error;
+ else if (error.name === "AbortError")
+ throw error;
+ let message = error.message;
+ if (error.name === "TypeError" && "cause" in error) {
+ if (error.cause instanceof Error) {
+ message = error.cause.message;
+ } else if (typeof error.cause === "string") {
+ message = error.cause;
+ }
+ }
+ throw new import_request_error.RequestError(message, 500, {
request: requestOptions
});
});
}
-
async function getResponseData(response) {
const contentType = response.headers.get("content-type");
-
if (/application\/json/.test(contentType)) {
- return response.json();
+ return response.json().catch(() => response.text()).catch(() => "");
}
-
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
return response.text();
}
-
return getBufferResponse(response);
}
-
function toErrorMessage(data) {
- if (typeof data === "string") return data; // istanbul ignore else - just in case
-
+ if (typeof data === "string")
+ return data;
if ("message" in data) {
if (Array.isArray(data.errors)) {
return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
}
-
return data.message;
- } // istanbul ignore next - just in case
-
-
+ }
return `Unknown error: ${JSON.stringify(data)}`;
}
+// pkg/dist-src/with-defaults.js
function withDefaults(oldEndpoint, newDefaults) {
- const endpoint = oldEndpoint.defaults(newDefaults);
-
- const newApi = function (route, parameters) {
- const endpointOptions = endpoint.merge(route, parameters);
-
+ const endpoint2 = oldEndpoint.defaults(newDefaults);
+ const newApi = function(route, parameters) {
+ const endpointOptions = endpoint2.merge(route, parameters);
if (!endpointOptions.request || !endpointOptions.request.hook) {
- return fetchWrapper(endpoint.parse(endpointOptions));
+ return fetchWrapper(endpoint2.parse(endpointOptions));
}
-
- const request = (route, parameters) => {
- return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
+ const request2 = (route2, parameters2) => {
+ return fetchWrapper(
+ endpoint2.parse(endpoint2.merge(route2, parameters2))
+ );
};
-
- Object.assign(request, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
+ Object.assign(request2, {
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
});
- return endpointOptions.request.hook(request, endpointOptions);
+ return endpointOptions.request.hook(request2, endpointOptions);
};
-
return Object.assign(newApi, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
});
}
-const request = withDefaults(endpoint.endpoint, {
+// pkg/dist-src/index.js
+var request = withDefaults(import_endpoint.endpoint, {
headers: {
- "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
}
});
-
-exports.request = request;
-//# sourceMappingURL=index.js.map
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
@@ -10522,12 +11885,12 @@ module.exports = function () {
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
exports.EntryHeader = __nccwpck_require__(9032);
-exports.MainHeader = __nccwpck_require__(4408);
+exports.MainHeader = __nccwpck_require__(8952);
/***/ }),
-/***/ 4408:
+/***/ 8952:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var Utils = __nccwpck_require__(5182),
@@ -13331,7 +14694,7 @@ exports.flatten = (...args) => {
/***/ }),
-/***/ 2286:
+/***/ 8367:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
@@ -13773,6 +15136,134 @@ module.exports = CacheableLookup;
module.exports["default"] = CacheableLookup;
+/***/ }),
+
+/***/ 4340:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const {PassThrough: PassThroughStream} = __nccwpck_require__(2781);
+
+module.exports = options => {
+ options = {...options};
+
+ const {array} = options;
+ let {encoding} = options;
+ const isBuffer = encoding === 'buffer';
+ let objectMode = false;
+
+ if (array) {
+ objectMode = !(encoding || isBuffer);
+ } else {
+ encoding = encoding || 'utf8';
+ }
+
+ if (isBuffer) {
+ encoding = null;
+ }
+
+ const stream = new PassThroughStream({objectMode});
+
+ if (encoding) {
+ stream.setEncoding(encoding);
+ }
+
+ let length = 0;
+ const chunks = [];
+
+ stream.on('data', chunk => {
+ chunks.push(chunk);
+
+ if (objectMode) {
+ length = chunks.length;
+ } else {
+ length += chunk.length;
+ }
+ });
+
+ stream.getBufferedValue = () => {
+ if (array) {
+ return chunks;
+ }
+
+ return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
+ };
+
+ stream.getBufferedLength = () => length;
+
+ return stream;
+};
+
+
+/***/ }),
+
+/***/ 7040:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const {constants: BufferConstants} = __nccwpck_require__(4300);
+const pump = __nccwpck_require__(8341);
+const bufferStream = __nccwpck_require__(4340);
+
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
+}
+
+async function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
+
+ options = {
+ maxBuffer: Infinity,
+ ...options
+ };
+
+ const {maxBuffer} = options;
+
+ let stream;
+ await new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ // Don't retrieve an oversized buffer.
+ if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
+ error.bufferedData = stream.getBufferedValue();
+ }
+
+ reject(error);
+ };
+
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
+
+ resolve();
+ });
+
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ });
+
+ return stream.getBufferedValue();
+}
+
+module.exports = getStream;
+// TODO: Remove this for the next major release
+module.exports["default"] = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});
+module.exports.array = (stream, options) => getStream(stream, {...options, array: true});
+module.exports.MaxBufferError = MaxBufferError;
+
+
/***/ }),
/***/ 8116:
@@ -13784,7 +15275,7 @@ module.exports["default"] = CacheableLookup;
const EventEmitter = __nccwpck_require__(2361);
const urlLib = __nccwpck_require__(7310);
const normalizeUrl = __nccwpck_require__(7952);
-const getStream = __nccwpck_require__(1766);
+const getStream = __nccwpck_require__(7040);
const CachePolicy = __nccwpck_require__(1002);
const Response = __nccwpck_require__(9004);
const lowercaseKeys = __nccwpck_require__(9662);
@@ -14454,7 +15945,7 @@ const async_1 = __nccwpck_require__(5679);
const stream_1 = __nccwpck_require__(4630);
const sync_1 = __nccwpck_require__(2405);
const settings_1 = __nccwpck_require__(952);
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
async function FastGlob(source, options) {
assertPatternsInput(source);
const works = getWorks(source, async_1.default, options);
@@ -14561,7 +16052,7 @@ module.exports = FastGlob;
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
function generate(input, settings) {
const patterns = processPatterns(input, settings);
const ignore = processPatterns(settings.ignore, settings);
@@ -14709,7 +16200,7 @@ exports["default"] = ProviderAsync;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
const partial_1 = __nccwpck_require__(5295);
class DeepFilter {
constructor(_settings, _micromatchOptions) {
@@ -14779,7 +16270,7 @@ exports["default"] = DeepFilter;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
class EntryFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
@@ -14850,7 +16341,7 @@ exports["default"] = EntryFilter;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
class ErrorFilter {
constructor(_settings) {
this._settings = _settings;
@@ -14873,7 +16364,7 @@ exports["default"] = ErrorFilter;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
class Matcher {
constructor(_patterns, _settings, _micromatchOptions) {
this._patterns = _patterns;
@@ -15098,7 +16589,7 @@ exports["default"] = ProviderSync;
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
class EntryTransformer {
constructor(_settings) {
this._settings = _settings;
@@ -15177,7 +16668,7 @@ exports["default"] = ReaderAsync;
Object.defineProperty(exports, "__esModule", ({ value: true }));
const path = __nccwpck_require__(1017);
const fsStat = __nccwpck_require__(109);
-const utils = __nccwpck_require__(5444);
+const utils = __nccwpck_require__(8223);
class Reader {
constructor(_settings) {
this._settings = _settings;
@@ -15463,7 +16954,7 @@ exports.createDirentFromStats = createDirentFromStats;
/***/ }),
-/***/ 5444:
+/***/ 8223:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -15501,11 +16992,11 @@ const IS_WINDOWS_PLATFORM = os.platform() === 'win32';
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
/**
* All non-escaped special characters.
- * Posix: ()*?[\]{|}, !+@ before (, ! at the beginning, \\ before non-special characters.
- * Windows: (){}, !+@ before (, ! at the beginning.
+ * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters.
+ * Windows: (){}[], !+@ before (, ! at the beginning.
*/
const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g;
-const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([(){}]|^!|[!+@](?=\())/g;
+const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g;
/**
* The device path (\\.\ or \\?\).
* https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths
@@ -15516,7 +17007,7 @@ const DOS_DEVICE_PATH_RE = /^\\\\([.?])/;
* Windows: !()+@{}
* https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions
*/
-const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@{}])/g;
+const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g;
/**
* Designed to work only with simple paths: `dir\\file`.
*/
@@ -15704,7 +17195,7 @@ function expandPatternsWithBraceExpansion(patterns) {
}
exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
function expandBraceExpansion(pattern) {
- const patterns = micromatch.braces(pattern, { expand: true, nodupes: true });
+ const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true });
/**
* Sort the patterns by length so that the same depth patterns are processed side by side.
* `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']`
@@ -16356,134 +17847,6 @@ const fill = (start, end, step, options = {}) => {
module.exports = fill;
-/***/ }),
-
-/***/ 1585:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-const {PassThrough: PassThroughStream} = __nccwpck_require__(2781);
-
-module.exports = options => {
- options = {...options};
-
- const {array} = options;
- let {encoding} = options;
- const isBuffer = encoding === 'buffer';
- let objectMode = false;
-
- if (array) {
- objectMode = !(encoding || isBuffer);
- } else {
- encoding = encoding || 'utf8';
- }
-
- if (isBuffer) {
- encoding = null;
- }
-
- const stream = new PassThroughStream({objectMode});
-
- if (encoding) {
- stream.setEncoding(encoding);
- }
-
- let length = 0;
- const chunks = [];
-
- stream.on('data', chunk => {
- chunks.push(chunk);
-
- if (objectMode) {
- length = chunks.length;
- } else {
- length += chunk.length;
- }
- });
-
- stream.getBufferedValue = () => {
- if (array) {
- return chunks;
- }
-
- return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
- };
-
- stream.getBufferedLength = () => length;
-
- return stream;
-};
-
-
-/***/ }),
-
-/***/ 1766:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-const {constants: BufferConstants} = __nccwpck_require__(4300);
-const pump = __nccwpck_require__(8341);
-const bufferStream = __nccwpck_require__(1585);
-
-class MaxBufferError extends Error {
- constructor() {
- super('maxBuffer exceeded');
- this.name = 'MaxBufferError';
- }
-}
-
-async function getStream(inputStream, options) {
- if (!inputStream) {
- return Promise.reject(new Error('Expected a stream'));
- }
-
- options = {
- maxBuffer: Infinity,
- ...options
- };
-
- const {maxBuffer} = options;
-
- let stream;
- await new Promise((resolve, reject) => {
- const rejectPromise = error => {
- // Don't retrieve an oversized buffer.
- if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
- error.bufferedData = stream.getBufferedValue();
- }
-
- reject(error);
- };
-
- stream = pump(inputStream, bufferStream(options), error => {
- if (error) {
- rejectPromise(error);
- return;
- }
-
- resolve();
- });
-
- stream.on('data', () => {
- if (stream.getBufferedLength() > maxBuffer) {
- rejectPromise(new MaxBufferError());
- }
- });
- });
-
- return stream.getBufferedValue();
-}
-
-module.exports = getStream;
-// TODO: Remove this for the next major release
-module.exports["default"] = getStream;
-module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});
-module.exports.array = (stream, options) => getStream(stream, {...options, array: true});
-module.exports.MaxBufferError = MaxBufferError;
-
-
/***/ }),
/***/ 6457:
@@ -16929,7 +18292,7 @@ const http = __nccwpck_require__(3685);
const http_1 = __nccwpck_require__(3685);
const https = __nccwpck_require__(5687);
const http_timer_1 = __nccwpck_require__(8097);
-const cacheable_lookup_1 = __nccwpck_require__(2286);
+const cacheable_lookup_1 = __nccwpck_require__(8367);
const CacheableRequest = __nccwpck_require__(8116);
const decompressResponse = __nccwpck_require__(2391);
// @ts-expect-error Missing types
@@ -21734,52 +23097,6 @@ module.exports = function(num) {
};
-/***/ }),
-
-/***/ 3287:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-/*!
- * is-plain-object
- *
- * Copyright (c) 2014-2017, Jon Schlinkert.
- * Released under the MIT License.
- */
-
-function isObject(o) {
- return Object.prototype.toString.call(o) === '[object Object]';
-}
-
-function isPlainObject(o) {
- var ctor,prot;
-
- if (isObject(o) === false) return false;
-
- // If has modified constructor
- ctor = o.constructor;
- if (ctor === undefined) return true;
-
- // If has modified prototype
- prot = ctor.prototype;
- if (isObject(prot) === false) return false;
-
- // If constructor does not have an Object-specific method
- if (prot.hasOwnProperty('isPrototypeOf') === false) {
- return false;
- }
-
- // Most likely a plain Object
- return true;
-}
-
-exports.isPlainObject = isPlainObject;
-
-
/***/ }),
/***/ 2820:
@@ -22297,8 +23614,8 @@ function pauseStreams (streams, options) {
const util = __nccwpck_require__(3837);
const braces = __nccwpck_require__(610);
-const picomatch = __nccwpck_require__(8569);
-const utils = __nccwpck_require__(479);
+const picomatch = __nccwpck_require__(2864);
+const utils = __nccwpck_require__(7426);
const isEmptyString = val => val === '' || val === './';
/**
@@ -22764,2245 +24081,18 @@ module.exports = micromatch;
/***/ }),
-/***/ 2610:
-/***/ ((module) => {
-
-"use strict";
-
-
-// We define these manually to ensure they're always copied
-// even if they would move up the prototype chain
-// https://nodejs.org/api/http.html#http_class_http_incomingmessage
-const knownProps = [
- 'destroy',
- 'setTimeout',
- 'socket',
- 'headers',
- 'trailers',
- 'rawHeaders',
- 'statusCode',
- 'httpVersion',
- 'httpVersionMinor',
- 'httpVersionMajor',
- 'rawTrailers',
- 'statusMessage'
-];
-
-module.exports = (fromStream, toStream) => {
- const fromProps = new Set(Object.keys(fromStream).concat(knownProps));
-
- for (const prop of fromProps) {
- // Don't overwrite existing properties
- if (prop in toStream) {
- continue;
- }
-
- toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];
- }
-};
-
-
-/***/ }),
-
-/***/ 467:
-/***/ ((module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-
-var Stream = _interopDefault(__nccwpck_require__(2781));
-var http = _interopDefault(__nccwpck_require__(3685));
-var Url = _interopDefault(__nccwpck_require__(7310));
-var whatwgUrl = _interopDefault(__nccwpck_require__(8665));
-var https = _interopDefault(__nccwpck_require__(5687));
-var zlib = _interopDefault(__nccwpck_require__(9796));
-
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
-
-const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
-
-class Blob {
- constructor() {
- this[TYPE] = '';
-
- const blobParts = arguments[0];
- const options = arguments[1];
-
- const buffers = [];
- let size = 0;
-
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers);
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString());
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = function () {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]';
- }
- slice() {
- const size = this.size;
-
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
-
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
-
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-function FetchError(message, type, systemError) {
- Error.call(this, message);
-
- this.message = message;
- this.type = type;
-
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code;
- }
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-FetchError.prototype = Object.create(Error.prototype);
-FetchError.prototype.constructor = FetchError;
-FetchError.prototype.name = 'FetchError';
-
-let convert;
-try {
- convert = (__nccwpck_require__(2877).convert);
-} catch (e) {}
-
-const INTERNALS = Symbol('Body internals');
-
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
-
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-function Body(body) {
- var _this = this;
-
- var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
- _ref$size = _ref.size;
-
- let size = _ref$size === undefined ? 0 : _ref$size;
- var _ref$timeout = _ref.timeout;
- let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
-
- if (body == null) {
- // body is undefined or null
- body = null;
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- body = Buffer.from(body.toString());
- } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- body = Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof Stream) ; else {
- // none of the above
- // coerce to string then buffer
- body = Buffer.from(String(body));
- }
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null
- };
- this.size = size;
- this.timeout = timeout;
-
- if (body instanceof Stream) {
- body.on('error', function (err) {
- const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
- _this[INTERNALS].error = error;
- });
- }
-}
-
-Body.prototype = {
- get body() {
- return this[INTERNALS].body;
- },
-
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- },
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer() {
- return consumeBody.call(this).then(function (buf) {
- return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- });
- },
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(function (buf) {
- return Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }), {
- [BUFFER]: buf
- });
- });
- },
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json() {
- var _this2 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
- }
- });
- },
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text() {
- return consumeBody.call(this).then(function (buffer) {
- return buffer.toString();
- });
- },
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody.call(this);
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- var _this3 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- return convertBody(buffer, _this3.headers);
- });
- }
-};
-
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
-});
-
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
- Object.defineProperty(proto, name, desc);
- }
- }
-};
-
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-function consumeBody() {
- var _this4 = this;
-
- if (this[INTERNALS].disturbed) {
- return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
- }
-
- this[INTERNALS].disturbed = true;
-
- if (this[INTERNALS].error) {
- return Body.Promise.reject(this[INTERNALS].error);
- }
-
- let body = this.body;
-
- // body is null
- if (body === null) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is blob
- if (isBlob(body)) {
- body = body.stream();
- }
-
- // body is buffer
- if (Buffer.isBuffer(body)) {
- return Body.Promise.resolve(body);
- }
-
- // istanbul ignore if: should never happen
- if (!(body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is stream
- // get ready to actually consume the body
- let accum = [];
- let accumBytes = 0;
- let abort = false;
-
- return new Body.Promise(function (resolve, reject) {
- let resTimeout;
-
- // allow timeout on slow response body
- if (_this4.timeout) {
- resTimeout = setTimeout(function () {
- abort = true;
- reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
- }, _this4.timeout);
- }
-
- // handle stream errors
- body.on('error', function (err) {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
- abort = true;
- reject(err);
- } else {
- // other errors, such as incorrect content-encoding
- reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
-
- body.on('data', function (chunk) {
- if (abort || chunk === null) {
- return;
- }
-
- if (_this4.size && accumBytes + chunk.length > _this4.size) {
- abort = true;
- reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
- return;
- }
-
- accumBytes += chunk.length;
- accum.push(chunk);
- });
-
- body.on('end', function () {
- if (abort) {
- return;
- }
-
- clearTimeout(resTimeout);
-
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}
-
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
-
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
-
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
-
- this[MAP] = Object.create(null);
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
-
- return;
- }
-
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
-
- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
-
- return this[MAP][key].join(', ');
- }
-
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
-
- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
-
- callback.call(thisArg, value, name, this);
- pairs = getHeaders(this);
- i++;
- }
- }
-
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- this[MAP][key !== undefined ? key : name] = [value];
- }
-
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- this[MAP][key].push(value);
- } else {
- this[MAP][name] = [value];
- }
- }
-
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has(name) {
- name = `${name}`;
- validateName(name);
- return find(this[MAP], name) !== undefined;
- }
-
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- delete this[MAP][key];
- }
- }
-
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw() {
- return this[MAP];
- }
-
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys() {
- return createHeadersIterator(this, 'key');
- }
-
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values() {
- return createHeadersIterator(this, 'value');
- }
-
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator]() {
- return createHeadersIterator(this, 'key+value');
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator];
-
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
-});
-
-function getHeaders(headers) {
- let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
-
- const keys = Object.keys(headers[MAP]).sort();
- return keys.map(kind === 'key' ? function (k) {
- return k.toLowerCase();
- } : kind === 'value' ? function (k) {
- return headers[MAP][k].join(', ');
- } : function (k) {
- return [k.toLowerCase(), headers[MAP][k].join(', ')];
- });
-}
-
-const INTERNAL = Symbol('internal');
-
-function createHeadersIterator(target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype);
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- };
- return iterator;
-}
-
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next() {
- // istanbul ignore if
- if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator');
- }
-
- var _INTERNAL = this[INTERNAL];
- const target = _INTERNAL.target,
- kind = _INTERNAL.kind,
- index = _INTERNAL.index;
-
- const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
- return {
- value: undefined,
- done: true
- };
- }
-
- this[INTERNAL].index = index + 1;
-
- return {
- value: values[index],
- done: false
- };
- }
-}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
-
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * Export the Headers object in a form that Node.js can consume.
- *
- * @param Headers headers
- * @return Object
- */
-function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host');
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
- }
-
- return obj;
-}
-
-/**
- * Create a Headers object from an object of headers, ignoring those that do
- * not conform to HTTP grammar productions.
- *
- * @param Object obj Object of headers
- * @return Headers
- */
-function createHeadersLenient(obj) {
- const headers = new Headers();
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue;
- }
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue;
- }
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
- } else {
- headers[MAP][name].push(val);
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
- }
- }
- return headers;
-}
-
-const INTERNALS$1 = Symbol('Response internals');
-
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
-
-/**
- * Response class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response {
- constructor() {
- let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
- let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- Body.call(this, body, opts);
-
- const status = opts.status || 200;
- const headers = new Headers(opts.headers);
-
- if (body != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- this[INTERNALS$1] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter
- };
- }
-
- get url() {
- return this[INTERNALS$1].url || '';
- }
-
- get status() {
- return this[INTERNALS$1].status;
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
- }
-
- get redirected() {
- return this[INTERNALS$1].counter > 0;
- }
-
- get statusText() {
- return this[INTERNALS$1].statusText;
- }
-
- get headers() {
- return this[INTERNALS$1].headers;
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected
- });
- }
-}
-
-Body.mixIn(Response.prototype);
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
-});
-
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-const INTERNALS$2 = Symbol('Request internals');
-const URL = Url.URL || whatwgUrl.URL;
-
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
-
-/**
- * Wrapper around `new URL` to handle arbitrary URLs
- *
- * @param {string} urlStr
- * @return {void}
- */
-function parseURL(urlStr) {
- /*
- Check whether the URL is absolute or not
- Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
- Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
- */
- if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
- urlStr = new URL(urlStr).toString();
- }
-
- // Fallback to old implementation for arbitrary URLs
- return parse_url(urlStr);
-}
-
-const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
-
-/**
- * Check if a value is an instance of Request.
- *
- * @param Mixed input
- * @return Boolean
- */
-function isRequest(input) {
- return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
-}
-
-function isAbortSignal(signal) {
- const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
- return !!(proto && proto.constructor.name === 'AbortSignal');
-}
-
-/**
- * Request class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request {
- constructor(input) {
- let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- let parsedURL;
-
- // normalize input
- if (!isRequest(input)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = parseURL(input.href);
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = parseURL(`${input}`);
- }
- input = {};
- } else {
- parsedURL = parseURL(input.url);
- }
-
- let method = init.method || input.method || 'GET';
- method = method.toUpperCase();
-
- if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
-
- let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
-
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- });
-
- const headers = new Headers(init.headers || input.headers || {});
-
- if (inputBody != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- let signal = isRequest(input) ? input.signal : null;
- if ('signal' in init) signal = init.signal;
-
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal');
- }
-
- this[INTERNALS$2] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal
- };
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
- this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- }
-
- get method() {
- return this[INTERNALS$2].method;
- }
-
- get url() {
- return format_url(this[INTERNALS$2].parsedURL);
- }
-
- get headers() {
- return this[INTERNALS$2].headers;
- }
-
- get redirect() {
- return this[INTERNALS$2].redirect;
- }
-
- get signal() {
- return this[INTERNALS$2].signal;
- }
-
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
-}
-
-Body.mixIn(Request.prototype);
-
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true }
-});
-
-/**
- * Convert a Request to Node.js http request options.
- *
- * @param Request A Request instance
- * @return Object The options object to be passed to http.request
- */
-function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS$2].parsedURL;
- const headers = new Headers(request[INTERNALS$2].headers);
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
-
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported');
- }
-
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported');
- }
-
- if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request);
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes);
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate');
- }
-
- let agent = request.agent;
- if (typeof agent === 'function') {
- agent = agent(parsedURL);
- }
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent
- });
-}
-
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
-
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-function AbortError(message) {
- Error.call(this, message);
-
- this.type = 'aborted';
- this.message = message;
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
-
-const URL$1 = Url.URL || whatwgUrl.URL;
-
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough$1 = Stream.PassThrough;
-
-const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
- const orig = new URL$1(original).hostname;
- const dest = new URL$1(destination).hostname;
-
- return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
-};
-
-/**
- * isSameProtocol reports whether the two provided URLs use the same protocol.
- *
- * Both domains must already be in canonical form.
- * @param {string|URL} original
- * @param {string|URL} destination
- */
-const isSameProtocol = function isSameProtocol(destination, original) {
- const orig = new URL$1(original).protocol;
- const dest = new URL$1(destination).protocol;
-
- return orig === dest;
-};
-
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-function fetch(url, opts) {
-
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
- }
-
- Body.Promise = fetch.Promise;
-
- // wrap http.request into fetch
- return new fetch.Promise(function (resolve, reject) {
- // build request object
- const request = new Request(url, opts);
- const options = getNodeRequestOptions(request);
-
- const send = (options.protocol === 'https:' ? https : http).request;
- const signal = request.signal;
-
- let response = null;
-
- const abort = function abort() {
- let error = new AbortError('The user aborted a request.');
- reject(error);
- if (request.body && request.body instanceof Stream.Readable) {
- destroyStream(request.body, error);
- }
- if (!response || !response.body) return;
- response.body.emit('error', error);
- };
-
- if (signal && signal.aborted) {
- abort();
- return;
- }
-
- const abortAndFinalize = function abortAndFinalize() {
- abort();
- finalize();
- };
-
- // send request
- const req = send(options);
- let reqTimeout;
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
-
- function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
- }
-
- if (request.timeout) {
- req.once('socket', function (socket) {
- reqTimeout = setTimeout(function () {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
- });
- }
-
- req.on('error', function (err) {
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
-
- if (response && response.body) {
- destroyStream(response.body, err);
- }
-
- finalize();
- });
-
- fixResponseChunkedTransferBadEnding(req, function (err) {
- if (signal && signal.aborted) {
- return;
- }
-
- if (response && response.body) {
- destroyStream(response.body, err);
- }
- });
-
- /* c8 ignore next 18 */
- if (parseInt(process.version.substring(1)) < 14) {
- // Before Node.js 14, pipeline() does not fully support async iterators and does not always
- // properly handle when the socket close/end events are out of order.
- req.on('socket', function (s) {
- s.addListener('close', function (hadError) {
- // if a data listener is still present we didn't end cleanly
- const hasDataListener = s.listenerCount('data') > 0;
-
- // if end happened before close but the socket didn't emit an error, do it now
- if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
- const err = new Error('Premature close');
- err.code = 'ERR_STREAM_PREMATURE_CLOSE';
- response.body.emit('error', err);
- }
- });
- });
- }
-
- req.on('response', function (res) {
- clearTimeout(reqTimeout);
-
- const headers = createHeadersLenient(res.headers);
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
-
- // HTTP fetch step 5.3
- let locationURL = null;
- try {
- locationURL = location === null ? null : new URL$1(location, request.url).toString();
- } catch (err) {
- // error here can only be invalid URL in Location: header
- // do not throw when options.redirect == manual
- // let the user extract the errorneous redirect URL
- if (request.redirect !== 'manual') {
- reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
- finalize();
- return;
- }
- }
-
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL);
- } catch (err) {
- // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
- }
- }
- break;
- case 'follow':
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
-
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout,
- size: request.size
- };
-
- if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
- for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
- requestOpts.headers.delete(name);
- }
- }
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
- finalize();
- return;
- }
- }
-
- // prepare response
- res.once('end', function () {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- });
- let body = res.pipe(new PassThrough$1());
-
- const response_options = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter
- };
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- };
-
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
- raw.once('data', function (chunk) {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
- } else {
- body = body.pipe(zlib.createInflateRaw());
- }
- response = new Response(body, response_options);
- resolve(response);
- });
- raw.on('end', function () {
- // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
- if (!response) {
- response = new Response(body, response_options);
- resolve(response);
- }
- });
- return;
- }
-
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
-
- writeToStream(req, request);
- });
-}
-function fixResponseChunkedTransferBadEnding(request, errorCallback) {
- let socket;
-
- request.on('socket', function (s) {
- socket = s;
- });
-
- request.on('response', function (response) {
- const headers = response.headers;
-
- if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
- response.once('close', function (hadError) {
- // tests for socket presence, as in some situations the
- // the 'socket' event is not triggered for the request
- // (happens in deno), avoids `TypeError`
- // if a data listener is still present we didn't end cleanly
- const hasDataListener = socket && socket.listenerCount('data') > 0;
-
- if (hasDataListener && !hadError) {
- const err = new Error('Premature close');
- err.code = 'ERR_STREAM_PREMATURE_CLOSE';
- errorCallback(err);
- }
- });
- }
- });
-}
-
-function destroyStream(stream, err) {
- if (stream.destroy) {
- stream.destroy(err);
- } else {
- // node < 8
- stream.emit('error', err);
- stream.end();
- }
-}
-
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
-
-// expose Promise
-fetch.Promise = global.Promise;
-
-module.exports = exports = fetch;
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports["default"] = exports;
-exports.Headers = Headers;
-exports.Request = Request;
-exports.Response = Response;
-exports.FetchError = FetchError;
-exports.AbortError = AbortError;
-
-
-/***/ }),
-
-/***/ 7952:
-/***/ ((module) => {
-
-"use strict";
-
-
-// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
-const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
-const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
-
-const testParameter = (name, filters) => {
- return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
-};
-
-const normalizeDataURL = (urlString, {stripHash}) => {
- const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
-
- if (!match) {
- throw new Error(`Invalid URL: ${urlString}`);
- }
-
- let {type, data, hash} = match.groups;
- const mediaType = type.split(';');
- hash = stripHash ? '' : hash;
-
- let isBase64 = false;
- if (mediaType[mediaType.length - 1] === 'base64') {
- mediaType.pop();
- isBase64 = true;
- }
-
- // Lowercase MIME type
- const mimeType = (mediaType.shift() || '').toLowerCase();
- const attributes = mediaType
- .map(attribute => {
- let [key, value = ''] = attribute.split('=').map(string => string.trim());
-
- // Lowercase `charset`
- if (key === 'charset') {
- value = value.toLowerCase();
-
- if (value === DATA_URL_DEFAULT_CHARSET) {
- return '';
- }
- }
-
- return `${key}${value ? `=${value}` : ''}`;
- })
- .filter(Boolean);
-
- const normalizedMediaType = [
- ...attributes
- ];
-
- if (isBase64) {
- normalizedMediaType.push('base64');
- }
-
- if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
- normalizedMediaType.unshift(mimeType);
- }
-
- return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ''}`;
-};
-
-const normalizeUrl = (urlString, options) => {
- options = {
- defaultProtocol: 'http:',
- normalizeProtocol: true,
- forceHttp: false,
- forceHttps: false,
- stripAuthentication: true,
- stripHash: false,
- stripTextFragment: true,
- stripWWW: true,
- removeQueryParameters: [/^utm_\w+/i],
- removeTrailingSlash: true,
- removeSingleSlash: true,
- removeDirectoryIndex: false,
- sortQueryParameters: true,
- ...options
- };
-
- urlString = urlString.trim();
-
- // Data URL
- if (/^data:/i.test(urlString)) {
- return normalizeDataURL(urlString, options);
- }
-
- if (/^view-source:/i.test(urlString)) {
- throw new Error('`view-source:` is not supported as it is a non-standard protocol');
- }
-
- const hasRelativeProtocol = urlString.startsWith('//');
- const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
-
- // Prepend protocol
- if (!isRelativeUrl) {
- urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
- }
-
- const urlObj = new URL(urlString);
-
- if (options.forceHttp && options.forceHttps) {
- throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
- }
-
- if (options.forceHttp && urlObj.protocol === 'https:') {
- urlObj.protocol = 'http:';
- }
-
- if (options.forceHttps && urlObj.protocol === 'http:') {
- urlObj.protocol = 'https:';
- }
-
- // Remove auth
- if (options.stripAuthentication) {
- urlObj.username = '';
- urlObj.password = '';
- }
-
- // Remove hash
- if (options.stripHash) {
- urlObj.hash = '';
- } else if (options.stripTextFragment) {
- urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, '');
- }
-
- // Remove duplicate slashes if not preceded by a protocol
- if (urlObj.pathname) {
- urlObj.pathname = urlObj.pathname.replace(/(?<!\b[a-z][a-z\d+\-.]{1,50}:)\/{2,}/g, '/');
- }
-
- // Decode URI octets
- if (urlObj.pathname) {
- try {
- urlObj.pathname = decodeURI(urlObj.pathname);
- } catch {}
- }
-
- // Remove directory index
- if (options.removeDirectoryIndex === true) {
- options.removeDirectoryIndex = [/^index\.[a-z]+$/];
- }
-
- if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
- let pathComponents = urlObj.pathname.split('/');
- const lastComponent = pathComponents[pathComponents.length - 1];
-
- if (testParameter(lastComponent, options.removeDirectoryIndex)) {
- pathComponents = pathComponents.slice(0, pathComponents.length - 1);
- urlObj.pathname = pathComponents.slice(1).join('/') + '/';
- }
- }
-
- if (urlObj.hostname) {
- // Remove trailing dot
- urlObj.hostname = urlObj.hostname.replace(/\.$/, '');
-
- // Remove `www.`
- if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) {
- // Each label should be max 63 at length (min: 1).
- // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
- // Each TLD should be up to 63 characters long (min: 2).
- // It is technically possible to have a single character TLD, but none currently exist.
- urlObj.hostname = urlObj.hostname.replace(/^www\./, '');
- }
- }
-
- // Remove query unwanted parameters
- if (Array.isArray(options.removeQueryParameters)) {
- for (const key of [...urlObj.searchParams.keys()]) {
- if (testParameter(key, options.removeQueryParameters)) {
- urlObj.searchParams.delete(key);
- }
- }
- }
-
- if (options.removeQueryParameters === true) {
- urlObj.search = '';
- }
-
- // Sort query parameters
- if (options.sortQueryParameters) {
- urlObj.searchParams.sort();
- }
-
- if (options.removeTrailingSlash) {
- urlObj.pathname = urlObj.pathname.replace(/\/$/, '');
- }
-
- const oldUrlString = urlString;
-
- // Take advantage of many of the Node `url` normalizations
- urlString = urlObj.toString();
-
- if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') {
- urlString = urlString.replace(/\/$/, '');
- }
-
- // Remove ending `/` unless removeSingleSlash is false
- if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) {
- urlString = urlString.replace(/\/$/, '');
- }
-
- // Restore relative protocol, if applicable
- if (hasRelativeProtocol && !options.normalizeProtocol) {
- urlString = urlString.replace(/^http:\/\//, '//');
- }
-
- // Remove http/https
- if (options.stripProtocol) {
- urlString = urlString.replace(/^(?:https?:)?\/\//, '');
- }
-
- return urlString;
-};
-
-module.exports = normalizeUrl;
-
-
-/***/ }),
-
-/***/ 1223:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-var wrappy = __nccwpck_require__(2940)
-module.exports = wrappy(once)
-module.exports.strict = wrappy(onceStrict)
-
-once.proto = once(function () {
- Object.defineProperty(Function.prototype, 'once', {
- value: function () {
- return once(this)
- },
- configurable: true
- })
-
- Object.defineProperty(Function.prototype, 'onceStrict', {
- value: function () {
- return onceStrict(this)
- },
- configurable: true
- })
-})
-
-function once (fn) {
- var f = function () {
- if (f.called) return f.value
- f.called = true
- return f.value = fn.apply(this, arguments)
- }
- f.called = false
- return f
-}
-
-function onceStrict (fn) {
- var f = function () {
- if (f.called)
- throw new Error(f.onceError)
- f.called = true
- return f.value = fn.apply(this, arguments)
- }
- var name = fn.name || 'Function wrapped with `once`'
- f.onceError = name + " shouldn't be called more than once"
- f.called = false
- return f
-}
-
-
-/***/ }),
-
-/***/ 9072:
-/***/ ((module) => {
-
-"use strict";
-
-
-class CancelError extends Error {
- constructor(reason) {
- super(reason || 'Promise was canceled');
- this.name = 'CancelError';
- }
-
- get isCanceled() {
- return true;
- }
-}
-
-class PCancelable {
- static fn(userFn) {
- return (...arguments_) => {
- return new PCancelable((resolve, reject, onCancel) => {
- arguments_.push(onCancel);
- // eslint-disable-next-line promise/prefer-await-to-then
- userFn(...arguments_).then(resolve, reject);
- });
- };
- }
-
- constructor(executor) {
- this._cancelHandlers = [];
- this._isPending = true;
- this._isCanceled = false;
- this._rejectOnCancel = true;
-
- this._promise = new Promise((resolve, reject) => {
- this._reject = reject;
-
- const onResolve = value => {
- if (!this._isCanceled || !onCancel.shouldReject) {
- this._isPending = false;
- resolve(value);
- }
- };
-
- const onReject = error => {
- this._isPending = false;
- reject(error);
- };
-
- const onCancel = handler => {
- if (!this._isPending) {
- throw new Error('The `onCancel` handler was attached after the promise settled.');
- }
-
- this._cancelHandlers.push(handler);
- };
-
- Object.defineProperties(onCancel, {
- shouldReject: {
- get: () => this._rejectOnCancel,
- set: boolean => {
- this._rejectOnCancel = boolean;
- }
- }
- });
-
- return executor(onResolve, onReject, onCancel);
- });
- }
-
- then(onFulfilled, onRejected) {
- // eslint-disable-next-line promise/prefer-await-to-then
- return this._promise.then(onFulfilled, onRejected);
- }
-
- catch(onRejected) {
- return this._promise.catch(onRejected);
- }
-
- finally(onFinally) {
- return this._promise.finally(onFinally);
- }
-
- cancel(reason) {
- if (!this._isPending || this._isCanceled) {
- return;
- }
-
- this._isCanceled = true;
-
- if (this._cancelHandlers.length > 0) {
- try {
- for (const handler of this._cancelHandlers) {
- handler();
- }
- } catch (error) {
- this._reject(error);
- return;
- }
- }
-
- if (this._rejectOnCancel) {
- this._reject(new CancelError(reason));
- }
- }
-
- get isCanceled() {
- return this._isCanceled;
- }
-}
-
-Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
-
-module.exports = PCancelable;
-module.exports.CancelError = CancelError;
-
-
-/***/ }),
-
-/***/ 8569:
+/***/ 2864:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-module.exports = __nccwpck_require__(3322);
+module.exports = __nccwpck_require__(555);
/***/ }),
-/***/ 6099:
+/***/ 6476:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
@@ -25189,14 +24279,14 @@ module.exports = {
/***/ }),
-/***/ 2139:
+/***/ 5961:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-const constants = __nccwpck_require__(6099);
-const utils = __nccwpck_require__(479);
+const constants = __nccwpck_require__(6476);
+const utils = __nccwpck_require__(7426);
/**
* Constants
@@ -26288,17 +25378,17 @@ module.exports = parse;
/***/ }),
-/***/ 3322:
+/***/ 555:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
const path = __nccwpck_require__(1017);
-const scan = __nccwpck_require__(2429);
-const parse = __nccwpck_require__(2139);
-const utils = __nccwpck_require__(479);
-const constants = __nccwpck_require__(6099);
+const scan = __nccwpck_require__(7751);
+const parse = __nccwpck_require__(5961);
+const utils = __nccwpck_require__(7426);
+const constants = __nccwpck_require__(6476);
const isObject = val => val && typeof val === 'object' && !Array.isArray(val);
/**
@@ -26636,6 +25726,2577 @@ picomatch.constants = constants;
module.exports = picomatch;
+/***/ }),
+
+/***/ 7751:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const utils = __nccwpck_require__(7426);
+const {
+ CHAR_ASTERISK, /* * */
+ CHAR_AT, /* @ */
+ CHAR_BACKWARD_SLASH, /* \ */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_EXCLAMATION_MARK, /* ! */
+ CHAR_FORWARD_SLASH, /* / */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_PLUS, /* + */
+ CHAR_QUESTION_MARK, /* ? */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_RIGHT_SQUARE_BRACKET /* ] */
+} = __nccwpck_require__(6476);
+
+const isPathSeparator = code => {
+ return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
+};
+
+const depth = token => {
+ if (token.isPrefix !== true) {
+ token.depth = token.isGlobstar ? Infinity : 1;
+ }
+};
+
+/**
+ * Quickly scans a glob pattern and returns an object with a handful of
+ * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
+ * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
+ * with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
+ *
+ * ```js
+ * const pm = require('picomatch');
+ * console.log(pm.scan('foo/bar/*.js'));
+ * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with tokens and regex source string.
+ * @api public
+ */
+
+const scan = (input, options) => {
+ const opts = options || {};
+
+ const length = input.length - 1;
+ const scanToEnd = opts.parts === true || opts.scanToEnd === true;
+ const slashes = [];
+ const tokens = [];
+ const parts = [];
+
+ let str = input;
+ let index = -1;
+ let start = 0;
+ let lastIndex = 0;
+ let isBrace = false;
+ let isBracket = false;
+ let isGlob = false;
+ let isExtglob = false;
+ let isGlobstar = false;
+ let braceEscaped = false;
+ let backslashes = false;
+ let negated = false;
+ let negatedExtglob = false;
+ let finished = false;
+ let braces = 0;
+ let prev;
+ let code;
+ let token = { value: '', depth: 0, isGlob: false };
+
+ const eos = () => index >= length;
+ const peek = () => str.charCodeAt(index + 1);
+ const advance = () => {
+ prev = code;
+ return str.charCodeAt(++index);
+ };
+
+ while (index < length) {
+ code = advance();
+ let next;
+
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ code = advance();
+
+ if (code === CHAR_LEFT_CURLY_BRACE) {
+ braceEscaped = true;
+ }
+ continue;
+ }
+
+ if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
+ braces++;
+
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ advance();
+ continue;
+ }
+
+ if (code === CHAR_LEFT_CURLY_BRACE) {
+ braces++;
+ continue;
+ }
+
+ if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
+ isBrace = token.isBrace = true;
+ isGlob = token.isGlob = true;
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
+
+ if (braceEscaped !== true && code === CHAR_COMMA) {
+ isBrace = token.isBrace = true;
+ isGlob = token.isGlob = true;
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
+
+ if (code === CHAR_RIGHT_CURLY_BRACE) {
+ braces--;
+
+ if (braces === 0) {
+ braceEscaped = false;
+ isBrace = token.isBrace = true;
+ finished = true;
+ break;
+ }
+ }
+ }
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
+
+ if (code === CHAR_FORWARD_SLASH) {
+ slashes.push(index);
+ tokens.push(token);
+ token = { value: '', depth: 0, isGlob: false };
+
+ if (finished === true) continue;
+ if (prev === CHAR_DOT && index === (start + 1)) {
+ start += 2;
+ continue;
+ }
+
+ lastIndex = index + 1;
+ continue;
+ }
+
+ if (opts.noext !== true) {
+ const isExtglobChar = code === CHAR_PLUS
+ || code === CHAR_AT
+ || code === CHAR_ASTERISK
+ || code === CHAR_QUESTION_MARK
+ || code === CHAR_EXCLAMATION_MARK;
+
+ if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
+ isGlob = token.isGlob = true;
+ isExtglob = token.isExtglob = true;
+ finished = true;
+ if (code === CHAR_EXCLAMATION_MARK && index === start) {
+ negatedExtglob = true;
+ }
+
+ if (scanToEnd === true) {
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ code = advance();
+ continue;
+ }
+
+ if (code === CHAR_RIGHT_PARENTHESES) {
+ isGlob = token.isGlob = true;
+ finished = true;
+ break;
+ }
+ }
+ continue;
+ }
+ break;
+ }
+ }
+
+ if (code === CHAR_ASTERISK) {
+ if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
+ isGlob = token.isGlob = true;
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+ break;
+ }
+
+ if (code === CHAR_QUESTION_MARK) {
+ isGlob = token.isGlob = true;
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+ break;
+ }
+
+ if (code === CHAR_LEFT_SQUARE_BRACKET) {
+ while (eos() !== true && (next = advance())) {
+ if (next === CHAR_BACKWARD_SLASH) {
+ backslashes = token.backslashes = true;
+ advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ isBracket = token.isBracket = true;
+ isGlob = token.isGlob = true;
+ finished = true;
+ break;
+ }
+ }
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
+
+ if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
+ negated = token.negated = true;
+ start++;
+ continue;
+ }
+
+ if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
+ isGlob = token.isGlob = true;
+
+ if (scanToEnd === true) {
+ while (eos() !== true && (code = advance())) {
+ if (code === CHAR_LEFT_PARENTHESES) {
+ backslashes = token.backslashes = true;
+ code = advance();
+ continue;
+ }
+
+ if (code === CHAR_RIGHT_PARENTHESES) {
+ finished = true;
+ break;
+ }
+ }
+ continue;
+ }
+ break;
+ }
+
+ if (isGlob === true) {
+ finished = true;
+
+ if (scanToEnd === true) {
+ continue;
+ }
+
+ break;
+ }
+ }
+
+ if (opts.noext === true) {
+ isExtglob = false;
+ isGlob = false;
+ }
+
+ let base = str;
+ let prefix = '';
+ let glob = '';
+
+ if (start > 0) {
+ prefix = str.slice(0, start);
+ str = str.slice(start);
+ lastIndex -= start;
+ }
+
+ if (base && isGlob === true && lastIndex > 0) {
+ base = str.slice(0, lastIndex);
+ glob = str.slice(lastIndex);
+ } else if (isGlob === true) {
+ base = '';
+ glob = str;
+ } else {
+ base = str;
+ }
+
+ if (base && base !== '' && base !== '/' && base !== str) {
+ if (isPathSeparator(base.charCodeAt(base.length - 1))) {
+ base = base.slice(0, -1);
+ }
+ }
+
+ if (opts.unescape === true) {
+ if (glob) glob = utils.removeBackslashes(glob);
+
+ if (base && backslashes === true) {
+ base = utils.removeBackslashes(base);
+ }
+ }
+
+ const state = {
+ prefix,
+ input,
+ start,
+ base,
+ glob,
+ isBrace,
+ isBracket,
+ isGlob,
+ isExtglob,
+ isGlobstar,
+ negated,
+ negatedExtglob
+ };
+
+ if (opts.tokens === true) {
+ state.maxDepth = 0;
+ if (!isPathSeparator(code)) {
+ tokens.push(token);
+ }
+ state.tokens = tokens;
+ }
+
+ if (opts.parts === true || opts.tokens === true) {
+ let prevIndex;
+
+ for (let idx = 0; idx < slashes.length; idx++) {
+ const n = prevIndex ? prevIndex + 1 : start;
+ const i = slashes[idx];
+ const value = input.slice(n, i);
+ if (opts.tokens) {
+ if (idx === 0 && start !== 0) {
+ tokens[idx].isPrefix = true;
+ tokens[idx].value = prefix;
+ } else {
+ tokens[idx].value = value;
+ }
+ depth(tokens[idx]);
+ state.maxDepth += tokens[idx].depth;
+ }
+ if (idx !== 0 || value !== '') {
+ parts.push(value);
+ }
+ prevIndex = i;
+ }
+
+ if (prevIndex && prevIndex + 1 < input.length) {
+ const value = input.slice(prevIndex + 1);
+ parts.push(value);
+
+ if (opts.tokens) {
+ tokens[tokens.length - 1].value = value;
+ depth(tokens[tokens.length - 1]);
+ state.maxDepth += tokens[tokens.length - 1].depth;
+ }
+ }
+
+ state.slashes = slashes;
+ state.parts = parts;
+ }
+
+ return state;
+};
+
+module.exports = scan;
+
+
+/***/ }),
+
+/***/ 7426:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const path = __nccwpck_require__(1017);
+const win32 = process.platform === 'win32';
+const {
+ REGEX_BACKSLASH,
+ REGEX_REMOVE_BACKSLASH,
+ REGEX_SPECIAL_CHARS,
+ REGEX_SPECIAL_CHARS_GLOBAL
+} = __nccwpck_require__(6476);
+
+exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
+exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
+exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
+exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
+
+exports.removeBackslashes = str => {
+ return str.replace(REGEX_REMOVE_BACKSLASH, match => {
+ return match === '\\' ? '' : match;
+ });
+};
+
+exports.supportsLookbehinds = () => {
+ const segs = process.version.slice(1).split('.').map(Number);
+ if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
+ return true;
+ }
+ return false;
+};
+
+exports.isWindows = options => {
+ if (options && typeof options.windows === 'boolean') {
+ return options.windows;
+ }
+ return win32 === true || path.sep === '\\';
+};
+
+exports.escapeLast = (input, char, lastIdx) => {
+ const idx = input.lastIndexOf(char, lastIdx);
+ if (idx === -1) return input;
+ if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
+ return `${input.slice(0, idx)}\\${input.slice(idx)}`;
+};
+
+exports.removePrefix = (input, state = {}) => {
+ let output = input;
+ if (output.startsWith('./')) {
+ output = output.slice(2);
+ state.prefix = './';
+ }
+ return output;
+};
+
+exports.wrapOutput = (input, state = {}, options = {}) => {
+ const prepend = options.contains ? '' : '^';
+ const append = options.contains ? '' : '$';
+
+ let output = `${prepend}(?:${input})${append}`;
+ if (state.negated === true) {
+ output = `(?:^(?!${output}).*$)`;
+ }
+ return output;
+};
+
+
+/***/ }),
+
+/***/ 2610:
+/***/ ((module) => {
+
+"use strict";
+
+
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProps = [
+ 'destroy',
+ 'setTimeout',
+ 'socket',
+ 'headers',
+ 'trailers',
+ 'rawHeaders',
+ 'statusCode',
+ 'httpVersion',
+ 'httpVersionMinor',
+ 'httpVersionMajor',
+ 'rawTrailers',
+ 'statusMessage'
+];
+
+module.exports = (fromStream, toStream) => {
+ const fromProps = new Set(Object.keys(fromStream).concat(knownProps));
+
+ for (const prop of fromProps) {
+ // Don't overwrite existing properties
+ if (prop in toStream) {
+ continue;
+ }
+
+ toStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];
+ }
+};
+
+
+/***/ }),
+
+/***/ 7952:
+/***/ ((module) => {
+
+"use strict";
+
+
+// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
+const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
+
+const testParameter = (name, filters) => {
+ return filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
+};
+
+const normalizeDataURL = (urlString, {stripHash}) => {
+	const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
+
+ if (!match) {
+ throw new Error(`Invalid URL: ${urlString}`);
+ }
+
+ let {type, data, hash} = match.groups;
+ const mediaType = type.split(';');
+ hash = stripHash ? '' : hash;
+
+ let isBase64 = false;
+ if (mediaType[mediaType.length - 1] === 'base64') {
+ mediaType.pop();
+ isBase64 = true;
+ }
+
+ // Lowercase MIME type
+ const mimeType = (mediaType.shift() || '').toLowerCase();
+ const attributes = mediaType
+ .map(attribute => {
+ let [key, value = ''] = attribute.split('=').map(string => string.trim());
+
+ // Lowercase `charset`
+ if (key === 'charset') {
+ value = value.toLowerCase();
+
+ if (value === DATA_URL_DEFAULT_CHARSET) {
+ return '';
+ }
+ }
+
+ return `${key}${value ? `=${value}` : ''}`;
+ })
+ .filter(Boolean);
+
+ const normalizedMediaType = [
+ ...attributes
+ ];
+
+ if (isBase64) {
+ normalizedMediaType.push('base64');
+ }
+
+ if (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
+ normalizedMediaType.unshift(mimeType);
+ }
+
+ return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ''}`;
+};
+
+const normalizeUrl = (urlString, options) => {
+ options = {
+ defaultProtocol: 'http:',
+ normalizeProtocol: true,
+ forceHttp: false,
+ forceHttps: false,
+ stripAuthentication: true,
+ stripHash: false,
+ stripTextFragment: true,
+ stripWWW: true,
+ removeQueryParameters: [/^utm_\w+/i],
+ removeTrailingSlash: true,
+ removeSingleSlash: true,
+ removeDirectoryIndex: false,
+ sortQueryParameters: true,
+ ...options
+ };
+
+ urlString = urlString.trim();
+
+ // Data URL
+ if (/^data:/i.test(urlString)) {
+ return normalizeDataURL(urlString, options);
+ }
+
+ if (/^view-source:/i.test(urlString)) {
+ throw new Error('`view-source:` is not supported as it is a non-standard protocol');
+ }
+
+ const hasRelativeProtocol = urlString.startsWith('//');
+ const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
+
+ // Prepend protocol
+ if (!isRelativeUrl) {
+ urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
+ }
+
+ const urlObj = new URL(urlString);
+
+ if (options.forceHttp && options.forceHttps) {
+ throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
+ }
+
+ if (options.forceHttp && urlObj.protocol === 'https:') {
+ urlObj.protocol = 'http:';
+ }
+
+ if (options.forceHttps && urlObj.protocol === 'http:') {
+ urlObj.protocol = 'https:';
+ }
+
+ // Remove auth
+ if (options.stripAuthentication) {
+ urlObj.username = '';
+ urlObj.password = '';
+ }
+
+ // Remove hash
+ if (options.stripHash) {
+ urlObj.hash = '';
+ } else if (options.stripTextFragment) {
+ urlObj.hash = urlObj.hash.replace(/#?:~:text.*?$/i, '');
+ }
+
+ // Remove duplicate slashes if not preceded by a protocol
+ if (urlObj.pathname) {
+ urlObj.pathname = urlObj.pathname.replace(/(?<!\b[a-z][a-z\d+\-.]{1,50}:)\/{2,}/g, '/');
+ }
+
+ // Decode URI octets
+ if (urlObj.pathname) {
+ try {
+ urlObj.pathname = decodeURI(urlObj.pathname);
+ } catch {}
+ }
+
+ // Remove directory index
+ if (options.removeDirectoryIndex === true) {
+ options.removeDirectoryIndex = [/^index\.[a-z]+$/];
+ }
+
+ if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
+ let pathComponents = urlObj.pathname.split('/');
+ const lastComponent = pathComponents[pathComponents.length - 1];
+
+ if (testParameter(lastComponent, options.removeDirectoryIndex)) {
+ pathComponents = pathComponents.slice(0, pathComponents.length - 1);
+ urlObj.pathname = pathComponents.slice(1).join('/') + '/';
+ }
+ }
+
+ if (urlObj.hostname) {
+ // Remove trailing dot
+ urlObj.hostname = urlObj.hostname.replace(/\.$/, '');
+
+ // Remove `www.`
+ if (options.stripWWW && /^www\.(?!www\.)(?:[a-z\-\d]{1,63})\.(?:[a-z.\-\d]{2,63})$/.test(urlObj.hostname)) {
+ // Each label should be max 63 at length (min: 1).
+ // Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
+ // Each TLD should be up to 63 characters long (min: 2).
+ // It is technically possible to have a single character TLD, but none currently exist.
+ urlObj.hostname = urlObj.hostname.replace(/^www\./, '');
+ }
+ }
+
+ // Remove query unwanted parameters
+ if (Array.isArray(options.removeQueryParameters)) {
+ for (const key of [...urlObj.searchParams.keys()]) {
+ if (testParameter(key, options.removeQueryParameters)) {
+ urlObj.searchParams.delete(key);
+ }
+ }
+ }
+
+ if (options.removeQueryParameters === true) {
+ urlObj.search = '';
+ }
+
+ // Sort query parameters
+ if (options.sortQueryParameters) {
+ urlObj.searchParams.sort();
+ }
+
+ if (options.removeTrailingSlash) {
+ urlObj.pathname = urlObj.pathname.replace(/\/$/, '');
+ }
+
+ const oldUrlString = urlString;
+
+ // Take advantage of many of the Node `url` normalizations
+ urlString = urlObj.toString();
+
+ if (!options.removeSingleSlash && urlObj.pathname === '/' && !oldUrlString.endsWith('/') && urlObj.hash === '') {
+ urlString = urlString.replace(/\/$/, '');
+ }
+
+ // Remove ending `/` unless removeSingleSlash is false
+ if ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '' && options.removeSingleSlash) {
+ urlString = urlString.replace(/\/$/, '');
+ }
+
+ // Restore relative protocol, if applicable
+ if (hasRelativeProtocol && !options.normalizeProtocol) {
+ urlString = urlString.replace(/^http:\/\//, '//');
+ }
+
+ // Remove http/https
+ if (options.stripProtocol) {
+ urlString = urlString.replace(/^(?:https?:)?\/\//, '');
+ }
+
+ return urlString;
+};
+
+module.exports = normalizeUrl;
+
+
+/***/ }),
+
+/***/ 1223:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var wrappy = __nccwpck_require__(2940)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
+
+
+/***/ }),
+
+/***/ 9072:
+/***/ ((module) => {
+
+"use strict";
+
+
+class CancelError extends Error {
+ constructor(reason) {
+ super(reason || 'Promise was canceled');
+ this.name = 'CancelError';
+ }
+
+ get isCanceled() {
+ return true;
+ }
+}
+
+class PCancelable {
+ static fn(userFn) {
+ return (...arguments_) => {
+ return new PCancelable((resolve, reject, onCancel) => {
+ arguments_.push(onCancel);
+ // eslint-disable-next-line promise/prefer-await-to-then
+ userFn(...arguments_).then(resolve, reject);
+ });
+ };
+ }
+
+ constructor(executor) {
+ this._cancelHandlers = [];
+ this._isPending = true;
+ this._isCanceled = false;
+ this._rejectOnCancel = true;
+
+ this._promise = new Promise((resolve, reject) => {
+ this._reject = reject;
+
+ const onResolve = value => {
+ if (!this._isCanceled || !onCancel.shouldReject) {
+ this._isPending = false;
+ resolve(value);
+ }
+ };
+
+ const onReject = error => {
+ this._isPending = false;
+ reject(error);
+ };
+
+ const onCancel = handler => {
+ if (!this._isPending) {
+ throw new Error('The `onCancel` handler was attached after the promise settled.');
+ }
+
+ this._cancelHandlers.push(handler);
+ };
+
+ Object.defineProperties(onCancel, {
+ shouldReject: {
+ get: () => this._rejectOnCancel,
+ set: boolean => {
+ this._rejectOnCancel = boolean;
+ }
+ }
+ });
+
+ return executor(onResolve, onReject, onCancel);
+ });
+ }
+
+ then(onFulfilled, onRejected) {
+ // eslint-disable-next-line promise/prefer-await-to-then
+ return this._promise.then(onFulfilled, onRejected);
+ }
+
+ catch(onRejected) {
+ return this._promise.catch(onRejected);
+ }
+
+ finally(onFinally) {
+ return this._promise.finally(onFinally);
+ }
+
+ cancel(reason) {
+ if (!this._isPending || this._isCanceled) {
+ return;
+ }
+
+ this._isCanceled = true;
+
+ if (this._cancelHandlers.length > 0) {
+ try {
+ for (const handler of this._cancelHandlers) {
+ handler();
+ }
+ } catch (error) {
+ this._reject(error);
+ return;
+ }
+ }
+
+ if (this._rejectOnCancel) {
+ this._reject(new CancelError(reason));
+ }
+ }
+
+ get isCanceled() {
+ return this._isCanceled;
+ }
+}
+
+Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
+
+module.exports = PCancelable;
+module.exports.CancelError = CancelError;
+
+
+/***/ }),
+
+/***/ 8569:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const os = __nccwpck_require__(2037);
+const pico = __nccwpck_require__(3322);
+
+const isWindows = os.platform() === 'win32';
+
+function picomatch(glob, options, returnState = false) {
+ // default to os.platform()
+ if (options && (options.windows === null || options.windows === undefined)) {
+ // don't mutate the original options object
+ options = { ...options, windows: isWindows };
+ }
+ return pico(glob, options, returnState);
+}
+
+module.exports = picomatch;
+// public api
+module.exports.test = pico.test;
+module.exports.matchBase = pico.matchBase;
+module.exports.isMatch = pico.isMatch;
+module.exports.parse = pico.parse;
+module.exports.scan = pico.scan;
+module.exports.compileRe = pico.compileRe;
+module.exports.toRegex = pico.toRegex;
+// for tests
+module.exports.makeRe = pico.makeRe;
+
+
+/***/ }),
+
+/***/ 6099:
+/***/ ((module) => {
+
+"use strict";
+
+
+const WIN_SLASH = '\\\\/';
+const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
+
+/**
+ * Posix glob regex
+ */
+
+const DOT_LITERAL = '\\.';
+const PLUS_LITERAL = '\\+';
+const QMARK_LITERAL = '\\?';
+const SLASH_LITERAL = '\\/';
+const ONE_CHAR = '(?=.)';
+const QMARK = '[^/]';
+const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
+const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
+const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
+const NO_DOT = `(?!${DOT_LITERAL})`;
+const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
+const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
+const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
+const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
+const STAR = `${QMARK}*?`;
+const SEP = '/';
+
+const POSIX_CHARS = {
+ DOT_LITERAL,
+ PLUS_LITERAL,
+ QMARK_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ QMARK,
+ END_ANCHOR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOTS,
+ NO_DOT_SLASH,
+ NO_DOTS_SLASH,
+ QMARK_NO_DOT,
+ STAR,
+ START_ANCHOR,
+ SEP
+};
+
+/**
+ * Windows glob regex
+ */
+
+const WINDOWS_CHARS = {
+ ...POSIX_CHARS,
+
+ SLASH_LITERAL: `[${WIN_SLASH}]`,
+ QMARK: WIN_NO_SLASH,
+ STAR: `${WIN_NO_SLASH}*?`,
+ DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
+ NO_DOT: `(?!${DOT_LITERAL})`,
+ NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+ NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
+ NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+ QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
+ START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
+ END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
+ SEP: '\\'
+};
+
+/**
+ * POSIX Bracket Regex
+ */
+
+const POSIX_REGEX_SOURCE = {
+ alnum: 'a-zA-Z0-9',
+ alpha: 'a-zA-Z',
+ ascii: '\\x00-\\x7F',
+ blank: ' \\t',
+ cntrl: '\\x00-\\x1F\\x7F',
+ digit: '0-9',
+ graph: '\\x21-\\x7E',
+ lower: 'a-z',
+ print: '\\x20-\\x7E ',
+ punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
+ space: ' \\t\\r\\n\\v\\f',
+ upper: 'A-Z',
+ word: 'A-Za-z0-9_',
+ xdigit: 'A-Fa-f0-9'
+};
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+ POSIX_REGEX_SOURCE,
+
+ // regular expressions
+ REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
+ REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
+ REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
+ REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
+ REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
+ REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
+
+ // Replace globs with equivalent patterns to reduce parsing time.
+ REPLACEMENTS: {
+ '***': '*',
+ '**/**': '**',
+ '**/**/**': '**'
+ },
+
+ // Digits
+ CHAR_0: 48, /* 0 */
+ CHAR_9: 57, /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 65, /* A */
+ CHAR_LOWERCASE_A: 97, /* a */
+ CHAR_UPPERCASE_Z: 90, /* Z */
+ CHAR_LOWERCASE_Z: 122, /* z */
+
+ CHAR_LEFT_PARENTHESES: 40, /* ( */
+ CHAR_RIGHT_PARENTHESES: 41, /* ) */
+
+ CHAR_ASTERISK: 42, /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: 38, /* & */
+ CHAR_AT: 64, /* @ */
+ CHAR_BACKWARD_SLASH: 92, /* \ */
+ CHAR_CARRIAGE_RETURN: 13, /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
+ CHAR_COLON: 58, /* : */
+ CHAR_COMMA: 44, /* , */
+ CHAR_DOT: 46, /* . */
+ CHAR_DOUBLE_QUOTE: 34, /* " */
+ CHAR_EQUAL: 61, /* = */
+ CHAR_EXCLAMATION_MARK: 33, /* ! */
+ CHAR_FORM_FEED: 12, /* \f */
+ CHAR_FORWARD_SLASH: 47, /* / */
+ CHAR_GRAVE_ACCENT: 96, /* ` */
+ CHAR_HASH: 35, /* # */
+ CHAR_HYPHEN_MINUS: 45, /* - */
+ CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
+ CHAR_LEFT_CURLY_BRACE: 123, /* { */
+ CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
+ CHAR_LINE_FEED: 10, /* \n */
+ CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
+ CHAR_PERCENT: 37, /* % */
+ CHAR_PLUS: 43, /* + */
+ CHAR_QUESTION_MARK: 63, /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
+ CHAR_RIGHT_CURLY_BRACE: 125, /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
+ CHAR_SEMICOLON: 59, /* ; */
+ CHAR_SINGLE_QUOTE: 39, /* ' */
+ CHAR_SPACE: 32, /* */
+ CHAR_TAB: 9, /* \t */
+ CHAR_UNDERSCORE: 95, /* _ */
+ CHAR_VERTICAL_LINE: 124, /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
+
+ /**
+ * Create EXTGLOB_CHARS
+ */
+
+ extglobChars(chars) {
+ return {
+ '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
+ '?': { type: 'qmark', open: '(?:', close: ')?' },
+ '+': { type: 'plus', open: '(?:', close: ')+' },
+ '*': { type: 'star', open: '(?:', close: ')*' },
+ '@': { type: 'at', open: '(?:', close: ')' }
+ };
+ },
+
+ /**
+ * Create GLOB_CHARS
+ */
+
+ globChars(win32) {
+ return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
+ }
+};
+
+
+/***/ }),
+
+/***/ 2139:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const constants = __nccwpck_require__(6099);
+const utils = __nccwpck_require__(479);
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ POSIX_REGEX_SOURCE,
+ REGEX_NON_SPECIAL_CHARS,
+ REGEX_SPECIAL_CHARS_BACKREF,
+ REPLACEMENTS
+} = constants;
+
+/**
+ * Helpers
+ */
+
+const expandRange = (args, options) => {
+ if (typeof options.expandRange === 'function') {
+ return options.expandRange(...args, options);
+ }
+
+ args.sort();
+ const value = `[${args.join('-')}]`;
+
+ try {
+ /* eslint-disable-next-line no-new */
+ new RegExp(value);
+ } catch (ex) {
+ return args.map(v => utils.escapeRegex(v)).join('..');
+ }
+
+ return value;
+};
+
+/**
+ * Create the message for a syntax error
+ */
+
+const syntaxError = (type, char) => {
+ return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
+};
+
+/**
+ * Parse the given input string.
+ * @param {String} input
+ * @param {Object} options
+ * @return {Object}
+ */
+
+const parse = (input, options) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ input = REPLACEMENTS[input] || input;
+
+ const opts = { ...options };
+ const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+
+ let len = input.length;
+ if (len > max) {
+ throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
+ }
+
+ const bos = { type: 'bos', value: '', output: opts.prepend || '' };
+ const tokens = [bos];
+
+ const capture = opts.capture ? '' : '?:';
+
+ // create constants based on platform, for windows or posix
+ const PLATFORM_CHARS = constants.globChars(opts.windows);
+ const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);
+
+ const {
+ DOT_LITERAL,
+ PLUS_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOT_SLASH,
+ NO_DOTS_SLASH,
+ QMARK,
+ QMARK_NO_DOT,
+ STAR,
+ START_ANCHOR
+ } = PLATFORM_CHARS;
+
+ const globstar = opts => {
+ return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+ };
+
+ const nodot = opts.dot ? '' : NO_DOT;
+ const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
+ let star = opts.bash === true ? globstar(opts) : STAR;
+
+ if (opts.capture) {
+ star = `(${star})`;
+ }
+
+ // minimatch options support
+ if (typeof opts.noext === 'boolean') {
+ opts.noextglob = opts.noext;
+ }
+
+ const state = {
+ input,
+ index: -1,
+ start: 0,
+ dot: opts.dot === true,
+ consumed: '',
+ output: '',
+ prefix: '',
+ backtrack: false,
+ negated: false,
+ brackets: 0,
+ braces: 0,
+ parens: 0,
+ quotes: 0,
+ globstar: false,
+ tokens
+ };
+
+ input = utils.removePrefix(input, state);
+ len = input.length;
+
+ const extglobs = [];
+ const braces = [];
+ const stack = [];
+ let prev = bos;
+ let value;
+
+ /**
+ * Tokenizing helpers
+ */
+
+ const eos = () => state.index === len - 1;
+ const peek = state.peek = (n = 1) => input[state.index + n];
+ const advance = state.advance = () => input[++state.index] || '';
+ const remaining = () => input.slice(state.index + 1);
+ const consume = (value = '', num = 0) => {
+ state.consumed += value;
+ state.index += num;
+ };
+
+ const append = token => {
+ state.output += token.output != null ? token.output : token.value;
+ consume(token.value);
+ };
+
+ const negate = () => {
+ let count = 1;
+
+ while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
+ advance();
+ state.start++;
+ count++;
+ }
+
+ if (count % 2 === 0) {
+ return false;
+ }
+
+ state.negated = true;
+ state.start++;
+ return true;
+ };
+
+ const increment = type => {
+ state[type]++;
+ stack.push(type);
+ };
+
+ const decrement = type => {
+ state[type]--;
+ stack.pop();
+ };
+
+ /**
+ * Push tokens onto the tokens array. This helper speeds up
+ * tokenizing by 1) helping us avoid backtracking as much as possible,
+ * and 2) helping us avoid creating extra tokens when consecutive
+ * characters are plain text. This improves performance and simplifies
+ * lookbehinds.
+ */
+
+ const push = tok => {
+ if (prev.type === 'globstar') {
+ const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
+ const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
+
+ if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
+ state.output = state.output.slice(0, -prev.output.length);
+ prev.type = 'star';
+ prev.value = '*';
+ prev.output = star;
+ state.output += prev.output;
+ }
+ }
+
+ if (extglobs.length && tok.type !== 'paren') {
+ extglobs[extglobs.length - 1].inner += tok.value;
+ }
+
+ if (tok.value || tok.output) append(tok);
+ if (prev && prev.type === 'text' && tok.type === 'text') {
+ prev.value += tok.value;
+ prev.output = (prev.output || '') + tok.value;
+ return;
+ }
+
+ tok.prev = prev;
+ tokens.push(tok);
+ prev = tok;
+ };
+
+ const extglobOpen = (type, value) => {
+ const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
+
+ token.prev = prev;
+ token.parens = state.parens;
+ token.output = state.output;
+ const output = (opts.capture ? '(' : '') + token.open;
+
+ increment('parens');
+ push({ type, value, output: state.output ? '' : ONE_CHAR });
+ push({ type: 'paren', extglob: true, value: advance(), output });
+ extglobs.push(token);
+ };
+
+ const extglobClose = token => {
+ let output = token.close + (opts.capture ? ')' : '');
+ let rest;
+
+ if (token.type === 'negate') {
+ let extglobStar = star;
+
+ if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
+ extglobStar = globstar(opts);
+ }
+
+ if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
+ output = token.close = `)$))${extglobStar}`;
+ }
+
+ if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
+ // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
+ // In this case, we need to parse the string and use it in the output of the original pattern.
+ // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
+ //
+ // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
+ const expression = parse(rest, { ...options, fastpaths: false }).output;
+
+ output = token.close = `)${expression})${extglobStar})`;
+ }
+
+ if (token.prev.type === 'bos') {
+ state.negatedExtglob = true;
+ }
+ }
+
+ push({ type: 'paren', extglob: true, value, output });
+ decrement('parens');
+ };
+
+ /**
+ * Fast paths
+ */
+
+ if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
+ let backslashes = false;
+
+ let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
+ if (first === '\\') {
+ backslashes = true;
+ return m;
+ }
+
+ if (first === '?') {
+ if (esc) {
+ return esc + first + (rest ? QMARK.repeat(rest.length) : '');
+ }
+ if (index === 0) {
+ return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
+ }
+ return QMARK.repeat(chars.length);
+ }
+
+ if (first === '.') {
+ return DOT_LITERAL.repeat(chars.length);
+ }
+
+ if (first === '*') {
+ if (esc) {
+ return esc + first + (rest ? star : '');
+ }
+ return star;
+ }
+ return esc ? m : `\\${m}`;
+ });
+
+ if (backslashes === true) {
+ if (opts.unescape === true) {
+ output = output.replace(/\\/g, '');
+ } else {
+ output = output.replace(/\\+/g, m => {
+ return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
+ });
+ }
+ }
+
+ if (output === input && opts.contains === true) {
+ state.output = input;
+ return state;
+ }
+
+ state.output = utils.wrapOutput(output, state, options);
+ return state;
+ }
+
+ /**
+ * Tokenize input until we reach end-of-string
+ */
+
+ while (!eos()) {
+ value = advance();
+
+ if (value === '\u0000') {
+ continue;
+ }
+
+ /**
+ * Escaped characters
+ */
+
+ if (value === '\\') {
+ const next = peek();
+
+ if (next === '/' && opts.bash !== true) {
+ continue;
+ }
+
+ if (next === '.' || next === ';') {
+ continue;
+ }
+
+ if (!next) {
+ value += '\\';
+ push({ type: 'text', value });
+ continue;
+ }
+
+ // collapse slashes to reduce potential for exploits
+ const match = /^\\+/.exec(remaining());
+ let slashes = 0;
+
+ if (match && match[0].length > 2) {
+ slashes = match[0].length;
+ state.index += slashes;
+ if (slashes % 2 !== 0) {
+ value += '\\';
+ }
+ }
+
+ if (opts.unescape === true) {
+ value = advance();
+ } else {
+ value += advance();
+ }
+
+ if (state.brackets === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+ }
+
+ /**
+ * If we're inside a regex character class, continue
+ * until we reach the closing bracket.
+ */
+
+ if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
+ if (opts.posix !== false && value === ':') {
+ const inner = prev.value.slice(1);
+ if (inner.includes('[')) {
+ prev.posix = true;
+
+ if (inner.includes(':')) {
+ const idx = prev.value.lastIndexOf('[');
+ const pre = prev.value.slice(0, idx);
+ const rest = prev.value.slice(idx + 2);
+ const posix = POSIX_REGEX_SOURCE[rest];
+ if (posix) {
+ prev.value = pre + posix;
+ state.backtrack = true;
+ advance();
+
+ if (!bos.output && tokens.indexOf(prev) === 1) {
+ bos.output = ONE_CHAR;
+ }
+ continue;
+ }
+ }
+ }
+ }
+
+ if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
+ value = `\\${value}`;
+ }
+
+ if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
+ value = `\\${value}`;
+ }
+
+ if (opts.posix === true && value === '!' && prev.value === '[') {
+ value = '^';
+ }
+
+ prev.value += value;
+ append({ value });
+ continue;
+ }
+
+ /**
+ * If we're inside a quoted string, continue
+ * until we reach the closing double quote.
+ */
+
+ if (state.quotes === 1 && value !== '"') {
+ value = utils.escapeRegex(value);
+ prev.value += value;
+ append({ value });
+ continue;
+ }
+
+ /**
+ * Double quotes
+ */
+
+ if (value === '"') {
+ state.quotes = state.quotes === 1 ? 0 : 1;
+ if (opts.keepQuotes === true) {
+ push({ type: 'text', value });
+ }
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === '(') {
+ increment('parens');
+ push({ type: 'paren', value });
+ continue;
+ }
+
+ if (value === ')') {
+ if (state.parens === 0 && opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('opening', '('));
+ }
+
+ const extglob = extglobs[extglobs.length - 1];
+ if (extglob && state.parens === extglob.parens + 1) {
+ extglobClose(extglobs.pop());
+ continue;
+ }
+
+ push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
+ decrement('parens');
+ continue;
+ }
+
+ /**
+ * Square brackets
+ */
+
+ if (value === '[') {
+ if (opts.nobracket === true || !remaining().includes(']')) {
+ if (opts.nobracket !== true && opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('closing', ']'));
+ }
+
+ value = `\\${value}`;
+ } else {
+ increment('brackets');
+ }
+
+ push({ type: 'bracket', value });
+ continue;
+ }
+
+ if (value === ']') {
+ if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
+ push({ type: 'text', value, output: `\\${value}` });
+ continue;
+ }
+
+ if (state.brackets === 0) {
+ if (opts.strictBrackets === true) {
+ throw new SyntaxError(syntaxError('opening', '['));
+ }
+
+ push({ type: 'text', value, output: `\\${value}` });
+ continue;
+ }
+
+ decrement('brackets');
+
+ const prevValue = prev.value.slice(1);
+ if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
+ value = `/${value}`;
+ }
+
+ prev.value += value;
+ append({ value });
+
+ // when literal brackets are explicitly disabled
+ // assume we should match with a regex character class
+ if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {
+ continue;
+ }
+
+ const escaped = utils.escapeRegex(prev.value);
+ state.output = state.output.slice(0, -prev.value.length);
+
+ // when literal brackets are explicitly enabled
+ // assume we should escape the brackets to match literal characters
+ if (opts.literalBrackets === true) {
+ state.output += escaped;
+ prev.value = escaped;
+ continue;
+ }
+
+ // when the user specifies nothing, try to match both
+ prev.value = `(${capture}${escaped}|${prev.value})`;
+ state.output += prev.value;
+ continue;
+ }
+
+ /**
+ * Braces
+ */
+
+ if (value === '{' && opts.nobrace !== true) {
+ increment('braces');
+
+ const open = {
+ type: 'brace',
+ value,
+ output: '(',
+ outputIndex: state.output.length,
+ tokensIndex: state.tokens.length
+ };
+
+ braces.push(open);
+ push(open);
+ continue;
+ }
+
+ if (value === '}') {
+ const brace = braces[braces.length - 1];
+
+ if (opts.nobrace === true || !brace) {
+ push({ type: 'text', value, output: value });
+ continue;
+ }
+
+ let output = ')';
+
+ if (brace.dots === true) {
+ const arr = tokens.slice();
+ const range = [];
+
+ for (let i = arr.length - 1; i >= 0; i--) {
+ tokens.pop();
+ if (arr[i].type === 'brace') {
+ break;
+ }
+ if (arr[i].type !== 'dots') {
+ range.unshift(arr[i].value);
+ }
+ }
+
+ output = expandRange(range, opts);
+ state.backtrack = true;
+ }
+
+ if (brace.comma !== true && brace.dots !== true) {
+ const out = state.output.slice(0, brace.outputIndex);
+ const toks = state.tokens.slice(brace.tokensIndex);
+ brace.value = brace.output = '\\{';
+ value = output = '\\}';
+ state.output = out;
+ for (const t of toks) {
+ state.output += (t.output || t.value);
+ }
+ }
+
+ push({ type: 'brace', value, output });
+ decrement('braces');
+ braces.pop();
+ continue;
+ }
+
+ /**
+ * Pipes
+ */
+
+ if (value === '|') {
+ if (extglobs.length > 0) {
+ extglobs[extglobs.length - 1].conditions++;
+ }
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Commas
+ */
+
+ if (value === ',') {
+ let output = value;
+
+ const brace = braces[braces.length - 1];
+ if (brace && stack[stack.length - 1] === 'braces') {
+ brace.comma = true;
+ output = '|';
+ }
+
+ push({ type: 'comma', value, output });
+ continue;
+ }
+
+ /**
+ * Slashes
+ */
+
+ if (value === '/') {
+ // if the beginning of the glob is "./", advance the start
+ // to the current index, and don't add the "./" characters
+ // to the state. This greatly simplifies lookbehinds when
+ // checking for BOS characters like "!" and "." (not "./")
+ if (prev.type === 'dot' && state.index === state.start + 1) {
+ state.start = state.index + 1;
+ state.consumed = '';
+ state.output = '';
+ tokens.pop();
+ prev = bos; // reset "prev" to the first token
+ continue;
+ }
+
+ push({ type: 'slash', value, output: SLASH_LITERAL });
+ continue;
+ }
+
+ /**
+ * Dots
+ */
+
+ if (value === '.') {
+ if (state.braces > 0 && prev.type === 'dot') {
+ if (prev.value === '.') prev.output = DOT_LITERAL;
+ const brace = braces[braces.length - 1];
+ prev.type = 'dots';
+ prev.output += value;
+ prev.value += value;
+ brace.dots = true;
+ continue;
+ }
+
+ if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
+ push({ type: 'text', value, output: DOT_LITERAL });
+ continue;
+ }
+
+ push({ type: 'dot', value, output: DOT_LITERAL });
+ continue;
+ }
+
+ /**
+ * Question marks
+ */
+
+ if (value === '?') {
+ const isGroup = prev && prev.value === '(';
+ if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ extglobOpen('qmark', value);
+ continue;
+ }
+
+ if (prev && prev.type === 'paren') {
+ const next = peek();
+ let output = value;
+
+ if (next === '<' && !utils.supportsLookbehinds()) {
+ throw new Error('Node.js v10 or higher is required for regex lookbehinds');
+ }
+
+ if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
+ output = `\\${value}`;
+ }
+
+ push({ type: 'text', value, output });
+ continue;
+ }
+
+ if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
+ push({ type: 'qmark', value, output: QMARK_NO_DOT });
+ continue;
+ }
+
+ push({ type: 'qmark', value, output: QMARK });
+ continue;
+ }
+
+ /**
+ * Exclamation
+ */
+
+ if (value === '!') {
+ if (opts.noextglob !== true && peek() === '(') {
+ if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
+ extglobOpen('negate', value);
+ continue;
+ }
+ }
+
+ if (opts.nonegate !== true && state.index === 0) {
+ negate();
+ continue;
+ }
+ }
+
+ /**
+ * Plus
+ */
+
+ if (value === '+') {
+ if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ extglobOpen('plus', value);
+ continue;
+ }
+
+ if ((prev && prev.value === '(') || opts.regex === false) {
+ push({ type: 'plus', value, output: PLUS_LITERAL });
+ continue;
+ }
+
+ if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
+ push({ type: 'plus', value });
+ continue;
+ }
+
+ push({ type: 'plus', value: PLUS_LITERAL });
+ continue;
+ }
+
+ /**
+ * Plain text
+ */
+
+ if (value === '@') {
+ if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+ push({ type: 'at', extglob: true, value, output: '' });
+ continue;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Plain text
+ */
+
+ if (value !== '*') {
+ if (value === '$' || value === '^') {
+ value = `\\${value}`;
+ }
+
+ const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
+ if (match) {
+ value += match[0];
+ state.index += match[0].length;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Stars
+ */
+
+ if (prev && (prev.type === 'globstar' || prev.star === true)) {
+ prev.type = 'star';
+ prev.star = true;
+ prev.value += value;
+ prev.output = star;
+ state.backtrack = true;
+ state.globstar = true;
+ consume(value);
+ continue;
+ }
+
+ let rest = remaining();
+ if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
+ extglobOpen('star', value);
+ continue;
+ }
+
+ if (prev.type === 'star') {
+ if (opts.noglobstar === true) {
+ consume(value);
+ continue;
+ }
+
+ const prior = prev.prev;
+ const before = prior.prev;
+ const isStart = prior.type === 'slash' || prior.type === 'bos';
+ const afterStar = before && (before.type === 'star' || before.type === 'globstar');
+
+ if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
+ push({ type: 'star', value, output: '' });
+ continue;
+ }
+
+ const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
+ const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
+ if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
+ push({ type: 'star', value, output: '' });
+ continue;
+ }
+
+ // strip consecutive `/**/`
+ while (rest.slice(0, 3) === '/**') {
+ const after = input[state.index + 4];
+ if (after && after !== '/') {
+ break;
+ }
+ rest = rest.slice(3);
+ consume('/**', 3);
+ }
+
+ if (prior.type === 'bos' && eos()) {
+ prev.type = 'globstar';
+ prev.value += value;
+ prev.output = globstar(opts);
+ state.output = prev.output;
+ state.globstar = true;
+ consume(value);
+ continue;
+ }
+
+ if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
+ state.output = state.output.slice(0, -(prior.output + prev.output).length);
+ prior.output = `(?:${prior.output}`;
+
+ prev.type = 'globstar';
+ prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
+ prev.value += value;
+ state.globstar = true;
+ state.output += prior.output + prev.output;
+ consume(value);
+ continue;
+ }
+
+ if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
+ const end = rest[1] !== void 0 ? '|$' : '';
+
+ state.output = state.output.slice(0, -(prior.output + prev.output).length);
+ prior.output = `(?:${prior.output}`;
+
+ prev.type = 'globstar';
+ prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
+ prev.value += value;
+
+ state.output += prior.output + prev.output;
+ state.globstar = true;
+
+ consume(value + advance());
+
+ push({ type: 'slash', value: '/', output: '' });
+ continue;
+ }
+
+ if (prior.type === 'bos' && rest[0] === '/') {
+ prev.type = 'globstar';
+ prev.value += value;
+ prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
+ state.output = prev.output;
+ state.globstar = true;
+ consume(value + advance());
+ push({ type: 'slash', value: '/', output: '' });
+ continue;
+ }
+
+ // remove single star from output
+ state.output = state.output.slice(0, -prev.output.length);
+
+ // reset previous token to globstar
+ prev.type = 'globstar';
+ prev.output = globstar(opts);
+ prev.value += value;
+
+ // reset output with globstar
+ state.output += prev.output;
+ state.globstar = true;
+ consume(value);
+ continue;
+ }
+
+ const token = { type: 'star', value, output: star };
+
+ if (opts.bash === true) {
+ token.output = '.*?';
+ if (prev.type === 'bos' || prev.type === 'slash') {
+ token.output = nodot + token.output;
+ }
+ push(token);
+ continue;
+ }
+
+ if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
+ token.output = value;
+ push(token);
+ continue;
+ }
+
+ if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
+ if (prev.type === 'dot') {
+ state.output += NO_DOT_SLASH;
+ prev.output += NO_DOT_SLASH;
+
+ } else if (opts.dot === true) {
+ state.output += NO_DOTS_SLASH;
+ prev.output += NO_DOTS_SLASH;
+
+ } else {
+ state.output += nodot;
+ prev.output += nodot;
+ }
+
+ if (peek() !== '*') {
+ state.output += ONE_CHAR;
+ prev.output += ONE_CHAR;
+ }
+ }
+
+ push(token);
+ }
+
+ while (state.brackets > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
+ state.output = utils.escapeLast(state.output, '[');
+ decrement('brackets');
+ }
+
+ while (state.parens > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
+ state.output = utils.escapeLast(state.output, '(');
+ decrement('parens');
+ }
+
+ while (state.braces > 0) {
+ if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
+ state.output = utils.escapeLast(state.output, '{');
+ decrement('braces');
+ }
+
+ if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
+ push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
+ }
+
+ // rebuild the output if we had to backtrack at any point
+ if (state.backtrack === true) {
+ state.output = '';
+
+ for (const token of state.tokens) {
+ state.output += token.output != null ? token.output : token.value;
+
+ if (token.suffix) {
+ state.output += token.suffix;
+ }
+ }
+ }
+
+ return state;
+};
+
+/**
+ * Fast paths for creating regular expressions for common glob patterns.
+ * This can significantly speed up processing and has very little downside
+ * impact when none of the fast paths match.
+ */
+
+parse.fastpaths = (input, options) => {
+ const opts = { ...options };
+ const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ const len = input.length;
+ if (len > max) {
+ throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
+ }
+
+ input = REPLACEMENTS[input] || input;
+
+ // create constants based on platform, for windows or posix
+ const {
+ DOT_LITERAL,
+ SLASH_LITERAL,
+ ONE_CHAR,
+ DOTS_SLASH,
+ NO_DOT,
+ NO_DOTS,
+ NO_DOTS_SLASH,
+ STAR,
+ START_ANCHOR
+ } = constants.globChars(opts.windows);
+
+ const nodot = opts.dot ? NO_DOTS : NO_DOT;
+ const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
+ const capture = opts.capture ? '' : '?:';
+ const state = { negated: false, prefix: '' };
+ let star = opts.bash === true ? '.*?' : STAR;
+
+ if (opts.capture) {
+ star = `(${star})`;
+ }
+
+ const globstar = opts => {
+ if (opts.noglobstar === true) return star;
+ return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+ };
+
+ const create = str => {
+ switch (str) {
+ case '*':
+ return `${nodot}${ONE_CHAR}${star}`;
+
+ case '.*':
+ return `${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+ case '*.*':
+ return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+ case '*/*':
+ return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
+
+ case '**':
+ return nodot + globstar(opts);
+
+ case '**/*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
+
+ case '**/*.*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+ case '**/.*':
+ return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+ default: {
+ const match = /^(.*?)\.(\w+)$/.exec(str);
+ if (!match) return;
+
+ const source = create(match[1]);
+ if (!source) return;
+
+ return source + DOT_LITERAL + match[2];
+ }
+ }
+ };
+
+ const output = utils.removePrefix(input, state);
+ let source = create(output);
+
+ if (source && opts.strictSlashes !== true) {
+ source += `${SLASH_LITERAL}?`;
+ }
+
+ return source;
+};
+
+module.exports = parse;
+
+
+/***/ }),
+
+/***/ 3322:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const scan = __nccwpck_require__(2429);
+const parse = __nccwpck_require__(2139);
+const utils = __nccwpck_require__(479);
+const constants = __nccwpck_require__(6099);
+const isObject = val => val && typeof val === 'object' && !Array.isArray(val);
+
+/**
+ * Creates a matcher function from one or more glob patterns. The
+ * returned function takes a string to match as its first argument,
+ * and returns true if the string is a match. The returned matcher
+ * function also takes a boolean as the second argument that, when true,
+ * returns an object with additional information.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch(glob[, options]);
+ *
+ * const isMatch = picomatch('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @name picomatch
+ * @param {String|Array} `globs` One or more glob patterns.
+ * @param {Object=} `options`
+ * @return {Function=} Returns a matcher function.
+ * @api public
+ */
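+// Sketch of the boolean second argument described above: calling the returned
+// matcher with `true` yields the full result object rather than a boolean,
+// e.g. isMatch('a.b', true) //=> { glob, regex, input, output, match, isMatch, ... }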
+
+const picomatch = (glob, options, returnState = false) => {
+ if (Array.isArray(glob)) {
+ const fns = glob.map(input => picomatch(input, options, returnState));
+ const arrayMatcher = str => {
+ for (const isMatch of fns) {
+ const state = isMatch(str);
+ if (state) return state;
+ }
+ return false;
+ };
+ return arrayMatcher;
+ }
+
+ const isState = isObject(glob) && glob.tokens && glob.input;
+
+ if (glob === '' || (typeof glob !== 'string' && !isState)) {
+ throw new TypeError('Expected pattern to be a non-empty string');
+ }
+
+ const opts = options || {};
+ const posix = opts.windows;
+ const regex = isState
+ ? picomatch.compileRe(glob, options)
+ : picomatch.makeRe(glob, options, false, true);
+
+ const state = regex.state;
+ delete regex.state;
+
+ let isIgnored = () => false;
+ if (opts.ignore) {
+ const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
+ isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
+ }
+
+ const matcher = (input, returnObject = false) => {
+ const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
+ const result = { glob, state, regex, posix, input, output, match, isMatch };
+
+ if (typeof opts.onResult === 'function') {
+ opts.onResult(result);
+ }
+
+ if (isMatch === false) {
+ result.isMatch = false;
+ return returnObject ? result : false;
+ }
+
+ if (isIgnored(input)) {
+ if (typeof opts.onIgnore === 'function') {
+ opts.onIgnore(result);
+ }
+ result.isMatch = false;
+ return returnObject ? result : false;
+ }
+
+ if (typeof opts.onMatch === 'function') {
+ opts.onMatch(result);
+ }
+ return returnObject ? result : true;
+ };
+
+ if (returnState) {
+ matcher.state = state;
+ }
+
+ return matcher;
+};
+
+/**
+ * Test `input` with the given `regex`. This is used by the main
+ * `picomatch()` function to test the input string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.test(input, regex[, options]);
+ *
+ * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
+ * // { isMatch: true, match: [ 'foo/bar', 'foo', 'bar' ], output: 'foo/bar' }
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp} `regex`
+ * @return {Object} Returns an object with matching info.
+ * @api public
+ */
+
+picomatch.test = (input, regex, options, { glob, posix } = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected input to be a string');
+ }
+
+ if (input === '') {
+ return { isMatch: false, output: '' };
+ }
+
+ const opts = options || {};
+ const format = opts.format || (posix ? utils.toPosixSlashes : null);
+ let match = input === glob;
+ let output = (match && format) ? format(input) : input;
+
+ if (match === false) {
+ output = format ? format(input) : input;
+ match = output === glob;
+ }
+
+ if (match === false || opts.capture === true) {
+ if (opts.matchBase === true || opts.basename === true) {
+ match = picomatch.matchBase(input, regex, options, posix);
+ } else {
+ match = regex.exec(output);
+ }
+ }
+
+ return { isMatch: Boolean(match), match, output };
+};
+
+/**
+ * Match the basename of a filepath.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.matchBase(input, glob[, options]);
+ * console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
+ * @return {Boolean}
+ * @api public
+ */
+
+picomatch.matchBase = (input, glob, options) => {
+ const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
+ return regex.test(utils.basename(input));
+};
+
+/**
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.isMatch(string, patterns[, options]);
+ *
+ * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String|Array} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const result = picomatch.parse(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as a regex source string.
+ * @api public
+ */
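+// Note: the returned state object includes, among other fields, `output` (the
+// regex source string) and `tokens`; compileRe() below consumes it directly.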
+
+picomatch.parse = (pattern, options) => {
+ if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
+ return parse(pattern, { ...options, fastpaths: false });
+};
+
+/**
+ * Scan a glob pattern to separate the pattern into segments.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.scan(input[, options]);
+ *
+ * const result = picomatch.scan('!./foo/*.js');
+ * console.log(result);
+ * { prefix: '!./',
+ * input: '!./foo/*.js',
+ * start: 3,
+ * base: 'foo',
+ * glob: '*.js',
+ * isBrace: false,
+ * isBracket: false,
+ * isGlob: true,
+ * isExtglob: false,
+ * isGlobstar: false,
+ * negated: true }
+ * ```
+ * @param {String} `input` Glob pattern to scan.
+ * @param {Object} `options`
+ * @return {Object} Returns an object with details about the scanned pattern (see the example above).
+ * @api public
+ */
+
+picomatch.scan = (input, options) => scan(input, options);
+
+/**
+ * Compile a regular expression from the `state` object returned by the
+ * [parse()](#parse) method.
+ *
+ * @param {Object} `state`
+ * @param {Object} `options`
+ * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
+ * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
+ * @return {RegExp}
+ * @api public
+ */
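+// Usage sketch: compileRe expects the state object produced by picomatch.parse(), e.g.
+//   const state = picomatch.parse('*.js');
+//   picomatch.compileRe(state); //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/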
+
+picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => {
+ if (returnOutput === true) {
+ return state.output;
+ }
+
+ const opts = options || {};
+ const prepend = opts.contains ? '' : '^';
+ const append = opts.contains ? '' : '$';
+
+ let source = `${prepend}(?:${state.output})${append}`;
+ if (state && state.negated === true) {
+ source = `^(?!${source}).*$`;
+ }
+
+ const regex = picomatch.toRegex(source, options);
+ if (returnState === true) {
+ regex.state = state;
+ }
+
+ return regex;
+};
+
+/**
+ * Create a regular expression from a glob pattern. This parses the pattern
+ * and compiles the result in a single step.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.makeRe(input[, options]);
+ *
+ * const regex = picomatch.makeRe('*.js');
+ * console.log(regex.test('index.js')); //=> true
+ * ```
+ * @param {String} `input` A glob pattern to convert to regex.
+ * @param {Object} `options`
+ * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
+ * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+
+picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
+ if (!input || typeof input !== 'string') {
+ throw new TypeError('Expected a non-empty string');
+ }
+
+ let parsed = { negated: false, fastpaths: true };
+
+ if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
+ parsed.output = parse.fastpaths(input, options);
+ }
+
+ if (!parsed.output) {
+ parsed = parse(input, options);
+ }
+
+ return picomatch.compileRe(parsed, options, returnOutput, returnState);
+};
+
+/**
+ * Create a regular expression from the given regex source string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.toRegex(source[, options]);
+ *
+ * const { output } = picomatch.parse('*.js');
+ * console.log(picomatch.toRegex(output));
+ * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
+ * ```
+ * @param {String} `source` Regular expression source string.
+ * @param {Object} `options`
+ * @return {RegExp}
+ * @api public
+ */
+
+picomatch.toRegex = (source, options) => {
+ try {
+ const opts = options || {};
+ return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
+ } catch (err) {
+ if (options && options.debug === true) throw err;
+ return /$^/;
+ }
+};
+
+/**
+ * Picomatch constants.
+ * @return {Object}
+ */
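+// The constants object also exposes the platform-specific glob-to-regex
+// fragments used by the parser; assuming the upstream picomatch API,
+// picomatch.constants.globChars(false) returns the posix set (STAR,
+// SLASH_LITERAL, DOT_LITERAL, ...).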
+
+picomatch.constants = constants;
+
+/**
+ * Expose "picomatch"
+ */
+
+module.exports = picomatch;
+
+
/***/ }),
/***/ 2429:
@@ -27043,8 +28704,6 @@ module.exports = scan;
"use strict";
-const path = __nccwpck_require__(1017);
-const win32 = process.platform === 'win32';
const {
REGEX_BACKSLASH,
REGEX_REMOVE_BACKSLASH,
@@ -27065,20 +28724,15 @@ exports.removeBackslashes = str => {
};
exports.supportsLookbehinds = () => {
- const segs = process.version.slice(1).split('.').map(Number);
- if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
- return true;
+ if (typeof process !== 'undefined') {
+ const segs = process.version.slice(1).split('.').map(Number);
+ if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {
+ return true;
+ }
}
return false;
};
-exports.isWindows = options => {
- if (options && typeof options.windows === 'boolean') {
- return options.windows;
- }
- return win32 === true || path.sep === '\\';
-};
-
exports.escapeLast = (input, char, lastIdx) => {
const idx = input.lastIndexOf(char, lastIdx);
if (idx === -1) return input;
@@ -27106,6 +28760,17 @@ exports.wrapOutput = (input, state = {}, options = {}) => {
return output;
};
+exports.basename = (path, { windows } = {}) => {
+ const segs = path.split(windows ? /[\\/]/ : '/');
+ const last = segs[segs.length - 1];
+
+ if (last === '') {
+ return segs[segs.length - 2];
+ }
+
+ return last;
+};
+
/***/ }),
@@ -29403,207 +31068,6 @@ toRegexRange.clearCache = () => (toRegexRange.cache = {});
module.exports = toRegexRange;
-/***/ }),
-
-/***/ 4256:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-var punycode = __nccwpck_require__(5477);
-var mappingTable = __nccwpck_require__(2020);
-
-var PROCESSING_OPTIONS = {
- TRANSITIONAL: 0,
- NONTRANSITIONAL: 1
-};
-
-function normalize(str) { // fix bug in v8
- return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000');
-}
-
-function findStatus(val) {
- var start = 0;
- var end = mappingTable.length - 1;
-
- while (start <= end) {
- var mid = Math.floor((start + end) / 2);
-
- var target = mappingTable[mid];
- if (target[0][0] <= val && target[0][1] >= val) {
- return target;
- } else if (target[0][0] > val) {
- end = mid - 1;
- } else {
- start = mid + 1;
- }
- }
-
- return null;
-}
-
-var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
-
-function countSymbols(string) {
- return string
- // replace every surrogate pair with a BMP symbol
- .replace(regexAstralSymbols, '_')
- // then get the length
- .length;
-}
-
-function mapChars(domain_name, useSTD3, processing_option) {
- var hasError = false;
- var processed = "";
-
- var len = countSymbols(domain_name);
- for (var i = 0; i < len; ++i) {
- var codePoint = domain_name.codePointAt(i);
- var status = findStatus(codePoint);
-
- switch (status[1]) {
- case "disallowed":
- hasError = true;
- processed += String.fromCodePoint(codePoint);
- break;
- case "ignored":
- break;
- case "mapped":
- processed += String.fromCodePoint.apply(String, status[2]);
- break;
- case "deviation":
- if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {
- processed += String.fromCodePoint.apply(String, status[2]);
- } else {
- processed += String.fromCodePoint(codePoint);
- }
- break;
- case "valid":
- processed += String.fromCodePoint(codePoint);
- break;
- case "disallowed_STD3_mapped":
- if (useSTD3) {
- hasError = true;
- processed += String.fromCodePoint(codePoint);
- } else {
- processed += String.fromCodePoint.apply(String, status[2]);
- }
- break;
- case "disallowed_STD3_valid":
- if (useSTD3) {
- hasError = true;
- }
-
- processed += String.fromCodePoint(codePoint);
- break;
- }
- }
-
- return {
- string: processed,
- error: hasError
- };
-}
-
-var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/;
-
-function validateLabel(label, processing_option) {
- if (label.substr(0, 4) === "xn--") {
- label = punycode.toUnicode(label);
- processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
- }
-
- var error = false;
-
- if (normalize(label) !== label ||
- (label[3] === "-" && label[4] === "-") ||
- label[0] === "-" || label[label.length - 1] === "-" ||
- label.indexOf(".") !== -1 ||
- label.search(combiningMarksRegex) === 0) {
- error = true;
- }
-
- var len = countSymbols(label);
- for (var i = 0; i < len; ++i) {
- var status = findStatus(label.codePointAt(i));
- if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") ||
- (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&
- status[1] !== "valid" && status[1] !== "deviation")) {
- error = true;
- break;
- }
- }
-
- return {
- label: label,
- error: error
- };
-}
-
-function processing(domain_name, useSTD3, processing_option) {
- var result = mapChars(domain_name, useSTD3, processing_option);
- result.string = normalize(result.string);
-
- var labels = result.string.split(".");
- for (var i = 0; i < labels.length; ++i) {
- try {
- var validation = validateLabel(labels[i]);
- labels[i] = validation.label;
- result.error = result.error || validation.error;
- } catch(e) {
- result.error = true;
- }
- }
-
- return {
- string: labels.join("."),
- error: result.error
- };
-}
-
-module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {
- var result = processing(domain_name, useSTD3, processing_option);
- var labels = result.string.split(".");
- labels = labels.map(function(l) {
- try {
- return punycode.toASCII(l);
- } catch(e) {
- result.error = true;
- return l;
- }
- });
-
- if (verifyDnsLength) {
- var total = labels.slice(0, labels.length - 1).join(".").length;
- if (total.length > 253 || total.length === 0) {
- result.error = true;
- }
-
- for (var i=0; i < labels.length; ++i) {
- if (labels.length > 63 || labels.length === 0) {
- result.error = true;
- break;
- }
- }
- }
-
- if (result.error) return null;
- return labels.join(".");
-};
-
-module.exports.toUnicode = function(domain_name, useSTD3) {
- var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);
-
- return {
- domain: result.string,
- error: result.error
- };
-};
-
-module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;
-
-
/***/ }),
/***/ 4294:
@@ -29884,6 +31348,21948 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
exports.debug = debug; // for test
+/***/ }),
+
+/***/ 1773:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Client = __nccwpck_require__(3598)
+const Dispatcher = __nccwpck_require__(412)
+const errors = __nccwpck_require__(8045)
+const Pool = __nccwpck_require__(4634)
+const BalancedPool = __nccwpck_require__(7931)
+const Agent = __nccwpck_require__(7890)
+const util = __nccwpck_require__(3983)
+const { InvalidArgumentError } = errors
+const api = __nccwpck_require__(4059)
+const buildConnector = __nccwpck_require__(2067)
+const MockClient = __nccwpck_require__(8687)
+const MockAgent = __nccwpck_require__(6771)
+const MockPool = __nccwpck_require__(6193)
+const mockErrors = __nccwpck_require__(888)
+const ProxyAgent = __nccwpck_require__(7858)
+const RetryHandler = __nccwpck_require__(2286)
+const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892)
+const DecoratorHandler = __nccwpck_require__(6930)
+const RedirectHandler = __nccwpck_require__(2860)
+const createRedirectInterceptor = __nccwpck_require__(8861)
+
+let hasCrypto
+try {
+ __nccwpck_require__(6113)
+ hasCrypto = true
+} catch {
+ hasCrypto = false
+}
+
+Object.assign(Dispatcher.prototype, api)
+
+module.exports.Dispatcher = Dispatcher
+module.exports.Client = Client
+module.exports.Pool = Pool
+module.exports.BalancedPool = BalancedPool
+module.exports.Agent = Agent
+module.exports.ProxyAgent = ProxyAgent
+module.exports.RetryHandler = RetryHandler
+
+module.exports.DecoratorHandler = DecoratorHandler
+module.exports.RedirectHandler = RedirectHandler
+module.exports.createRedirectInterceptor = createRedirectInterceptor
+
+module.exports.buildConnector = buildConnector
+module.exports.errors = errors
+
+function makeDispatcher (fn) {
+ return (url, opts, handler) => {
+ if (typeof opts === 'function') {
+ handler = opts
+ opts = null
+ }
+
+ if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
+ throw new InvalidArgumentError('invalid url')
+ }
+
+ if (opts != null && typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (opts && opts.path != null) {
+ if (typeof opts.path !== 'string') {
+ throw new InvalidArgumentError('invalid opts.path')
+ }
+
+ let path = opts.path
+ if (!opts.path.startsWith('/')) {
+ path = `/${path}`
+ }
+
+ url = new URL(util.parseOrigin(url).origin + path)
+ } else {
+ if (!opts) {
+ opts = typeof url === 'object' ? url : {}
+ }
+
+ url = util.parseURL(url)
+ }
+
+ const { agent, dispatcher = getGlobalDispatcher() } = opts
+
+ if (agent) {
+ throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
+ }
+
+ return fn.call(dispatcher, {
+ ...opts,
+ origin: url.origin,
+ path: url.search ? `${url.pathname}${url.search}` : url.pathname,
+ method: opts.method || (opts.body ? 'PUT' : 'GET')
+ }, handler)
+ }
+}
+
+module.exports.setGlobalDispatcher = setGlobalDispatcher
+module.exports.getGlobalDispatcher = getGlobalDispatcher
+
+if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
+ let fetchImpl = null
+ module.exports.fetch = async function fetch (resource) {
+ if (!fetchImpl) {
+ fetchImpl = (__nccwpck_require__(4881).fetch)
+ }
+
+ try {
+ return await fetchImpl(...arguments)
+ } catch (err) {
+ if (typeof err === 'object') {
+ Error.captureStackTrace(err, this)
+ }
+
+ throw err
+ }
+ }
+ module.exports.Headers = __nccwpck_require__(554).Headers
+ module.exports.Response = __nccwpck_require__(7823).Response
+ module.exports.Request = __nccwpck_require__(8359).Request
+ module.exports.FormData = __nccwpck_require__(2015).FormData
+ module.exports.File = __nccwpck_require__(8511).File
+ module.exports.FileReader = __nccwpck_require__(1446).FileReader
+
+ const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246)
+
+ module.exports.setGlobalOrigin = setGlobalOrigin
+ module.exports.getGlobalOrigin = getGlobalOrigin
+
+ const { CacheStorage } = __nccwpck_require__(7907)
+ const { kConstruct } = __nccwpck_require__(9174)
+
+ // Cache & CacheStorage are tightly coupled with fetch. Even if it may run
+ // in an older version of Node, it doesn't have any use without fetch.
+ module.exports.caches = new CacheStorage(kConstruct)
+}
+
+if (util.nodeMajor >= 16) {
+ const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724)
+
+ module.exports.deleteCookie = deleteCookie
+ module.exports.getCookies = getCookies
+ module.exports.getSetCookies = getSetCookies
+ module.exports.setCookie = setCookie
+
+ const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)
+
+ module.exports.parseMIMEType = parseMIMEType
+ module.exports.serializeAMimeType = serializeAMimeType
+}
+
+if (util.nodeMajor >= 18 && hasCrypto) {
+ const { WebSocket } = __nccwpck_require__(4284)
+
+ module.exports.WebSocket = WebSocket
+}
+
+module.exports.request = makeDispatcher(api.request)
+module.exports.stream = makeDispatcher(api.stream)
+module.exports.pipeline = makeDispatcher(api.pipeline)
+module.exports.connect = makeDispatcher(api.connect)
+module.exports.upgrade = makeDispatcher(api.upgrade)
+
+module.exports.MockClient = MockClient
+module.exports.MockPool = MockPool
+module.exports.MockAgent = MockAgent
+module.exports.mockErrors = mockErrors
+
+
+/***/ }),
+
+/***/ 7890:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785)
+const DispatcherBase = __nccwpck_require__(4839)
+const Pool = __nccwpck_require__(4634)
+const Client = __nccwpck_require__(3598)
+const util = __nccwpck_require__(3983)
+const createRedirectInterceptor = __nccwpck_require__(8861)
+const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)()
+
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kMaxRedirections = Symbol('maxRedirections')
+const kOnDrain = Symbol('onDrain')
+const kFactory = Symbol('factory')
+const kFinalizer = Symbol('finalizer')
+const kOptions = Symbol('options')
+
+function defaultFactory (origin, opts) {
+ return opts && opts.connections === 1
+ ? new Client(origin, opts)
+ : new Pool(origin, opts)
+}
+
+class Agent extends DispatcherBase {
+ constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
+ super()
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ if (connect && typeof connect !== 'function') {
+ connect = { ...connect }
+ }
+
+ this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
+ ? options.interceptors.Agent
+ : [createRedirectInterceptor({ maxRedirections })]
+
+ this[kOptions] = { ...util.deepClone(options), connect }
+ this[kOptions].interceptors = options.interceptors
+ ? { ...options.interceptors }
+ : undefined
+ this[kMaxRedirections] = maxRedirections
+ this[kFactory] = factory
+ this[kClients] = new Map()
+ this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => {
+ const ref = this[kClients].get(key)
+ if (ref !== undefined && ref.deref() === undefined) {
+ this[kClients].delete(key)
+ }
+ })
+
+ const agent = this
+
+ this[kOnDrain] = (origin, targets) => {
+ agent.emit('drain', origin, [agent, ...targets])
+ }
+
+ this[kOnConnect] = (origin, targets) => {
+ agent.emit('connect', origin, [agent, ...targets])
+ }
+
+ this[kOnDisconnect] = (origin, targets, err) => {
+ agent.emit('disconnect', origin, [agent, ...targets], err)
+ }
+
+ this[kOnConnectionError] = (origin, targets, err) => {
+ agent.emit('connectionError', origin, [agent, ...targets], err)
+ }
+ }
+
+ get [kRunning] () {
+ let ret = 0
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+ /* istanbul ignore next: gc is undeterministic */
+ if (client) {
+ ret += client[kRunning]
+ }
+ }
+ return ret
+ }
+
+ [kDispatch] (opts, handler) {
+ let key
+ if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
+ key = String(opts.origin)
+ } else {
+ throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
+ }
+
+ const ref = this[kClients].get(key)
+
+ let dispatcher = ref ? ref.deref() : null
+ if (!dispatcher) {
+ dispatcher = this[kFactory](opts.origin, this[kOptions])
+ .on('drain', this[kOnDrain])
+ .on('connect', this[kOnConnect])
+ .on('disconnect', this[kOnDisconnect])
+ .on('connectionError', this[kOnConnectionError])
+
+ this[kClients].set(key, new WeakRef(dispatcher))
+ this[kFinalizer].register(dispatcher, key)
+ }
+
+ return dispatcher.dispatch(opts, handler)
+ }
+
+ async [kClose] () {
+ const closePromises = []
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+ /* istanbul ignore else: gc is undeterministic */
+ if (client) {
+ closePromises.push(client.close())
+ }
+ }
+
+ await Promise.all(closePromises)
+ }
+
+ async [kDestroy] (err) {
+ const destroyPromises = []
+ for (const ref of this[kClients].values()) {
+ const client = ref.deref()
+ /* istanbul ignore else: gc is undeterministic */
+ if (client) {
+ destroyPromises.push(client.destroy(err))
+ }
+ }
+
+ await Promise.all(destroyPromises)
+ }
+}
+
+module.exports = Agent
+
+
+/***/ }),
+
+/***/ 7032:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const { addAbortListener } = __nccwpck_require__(3983)
+const { RequestAbortedError } = __nccwpck_require__(8045)
+
+const kListener = Symbol('kListener')
+const kSignal = Symbol('kSignal')
+
+function abort (self) {
+ if (self.abort) {
+ self.abort()
+ } else {
+ self.onError(new RequestAbortedError())
+ }
+}
+
+function addSignal (self, signal) {
+ self[kSignal] = null
+ self[kListener] = null
+
+ if (!signal) {
+ return
+ }
+
+ if (signal.aborted) {
+ abort(self)
+ return
+ }
+
+ self[kSignal] = signal
+ self[kListener] = () => {
+ abort(self)
+ }
+
+ addAbortListener(self[kSignal], self[kListener])
+}
+
+function removeSignal (self) {
+ if (!self[kSignal]) {
+ return
+ }
+
+ if ('removeEventListener' in self[kSignal]) {
+ self[kSignal].removeEventListener('abort', self[kListener])
+ } else {
+ self[kSignal].removeListener('abort', self[kListener])
+ }
+
+ self[kSignal] = null
+ self[kListener] = null
+}
+
+module.exports = {
+ addSignal,
+ removeSignal
+}
+
+
+/***/ }),
+
+/***/ 9744:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { AsyncResource } = __nccwpck_require__(852)
+const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { addSignal, removeSignal } = __nccwpck_require__(7032)
+
+class ConnectHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ const { signal, opaque, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ super('UNDICI_CONNECT')
+
+ this.opaque = opaque || null
+ this.responseHeaders = responseHeaders || null
+ this.callback = callback
+ this.abort = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders () {
+ throw new SocketError('bad connect', null)
+ }
+
+ onUpgrade (statusCode, rawHeaders, socket) {
+ const { callback, opaque, context } = this
+
+ removeSignal(this)
+
+ this.callback = null
+
+ let headers = rawHeaders
+ // A null rawHeaders value indicates an HTTP2 session
+ if (headers != null) {
+ headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ }
+
+ this.runInAsyncScope(callback, null, null, {
+ statusCode,
+ headers,
+ socket,
+ opaque,
+ context
+ })
+ }
+
+ onError (err) {
+ const { callback, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+ }
+}
+
+function connect (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ connect.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ const connectHandler = new ConnectHandler(opts, callback)
+ this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = connect
+
+
+/***/ }),
+
+/***/ 8752:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ Readable,
+ Duplex,
+ PassThrough
+} = __nccwpck_require__(2781)
+const {
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError
+} = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { AsyncResource } = __nccwpck_require__(852)
+const { addSignal, removeSignal } = __nccwpck_require__(7032)
+const assert = __nccwpck_require__(9491)
+
+const kResume = Symbol('resume')
+
+class PipelineRequest extends Readable {
+ constructor () {
+ super({ autoDestroy: true })
+
+ this[kResume] = null
+ }
+
+ _read () {
+ const { [kResume]: resume } = this
+
+ if (resume) {
+ this[kResume] = null
+ resume()
+ }
+ }
+
+ _destroy (err, callback) {
+ this._read()
+
+ callback(err)
+ }
+}
+
+class PipelineResponse extends Readable {
+ constructor (resume) {
+ super({ autoDestroy: true })
+ this[kResume] = resume
+ }
+
+ _read () {
+ this[kResume]()
+ }
+
+ _destroy (err, callback) {
+ if (!err && !this._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ callback(err)
+ }
+}
+
+class PipelineHandler extends AsyncResource {
+ constructor (opts, handler) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof handler !== 'function') {
+ throw new InvalidArgumentError('invalid handler')
+ }
+
+ const { signal, method, opaque, onInfo, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_PIPELINE')
+
+ this.opaque = opaque || null
+ this.responseHeaders = responseHeaders || null
+ this.handler = handler
+ this.abort = null
+ this.context = null
+ this.onInfo = onInfo || null
+
+ this.req = new PipelineRequest().on('error', util.nop)
+
+ this.ret = new Duplex({
+ readableObjectMode: opts.objectMode,
+ autoDestroy: true,
+ read: () => {
+ const { body } = this
+
+ if (body && body.resume) {
+ body.resume()
+ }
+ },
+ write: (chunk, encoding, callback) => {
+ const { req } = this
+
+ if (req.push(chunk, encoding) || req._readableState.destroyed) {
+ callback()
+ } else {
+ req[kResume] = callback
+ }
+ },
+ destroy: (err, callback) => {
+ const { body, req, res, ret, abort } = this
+
+ if (!err && !ret._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ if (abort && err) {
+ abort()
+ }
+
+ util.destroy(body, err)
+ util.destroy(req, err)
+ util.destroy(res, err)
+
+ removeSignal(this)
+
+ callback(err)
+ }
+ }).on('prefinish', () => {
+ const { req } = this
+
+ // Node < 15 does not call _final in same tick.
+ req.push(null)
+ })
+
+ this.res = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ const { ret, res } = this
+
+ assert(!res, 'pipeline cannot be retried')
+
+ if (ret.destroyed) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume) {
+ const { opaque, handler, context } = this
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ this.res = new PipelineResponse(resume)
+
+ let body
+ try {
+ this.handler = null
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ body = this.runInAsyncScope(handler, null, {
+ statusCode,
+ headers,
+ opaque,
+ body: this.res,
+ context
+ })
+ } catch (err) {
+ this.res.on('error', util.nop)
+ throw err
+ }
+
+ if (!body || typeof body.on !== 'function') {
+ throw new InvalidReturnValueError('expected Readable')
+ }
+
+ body
+ .on('data', (chunk) => {
+ const { ret, body } = this
+
+ if (!ret.push(chunk) && body.pause) {
+ body.pause()
+ }
+ })
+ .on('error', (err) => {
+ const { ret } = this
+
+ util.destroy(ret, err)
+ })
+ .on('end', () => {
+ const { ret } = this
+
+ ret.push(null)
+ })
+ .on('close', () => {
+ const { ret } = this
+
+ if (!ret._readableState.ended) {
+ util.destroy(ret, new RequestAbortedError())
+ }
+ })
+
+ this.body = body
+ }
+
+ onData (chunk) {
+ const { res } = this
+ return res.push(chunk)
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+ res.push(null)
+ }
+
+ onError (err) {
+ const { ret } = this
+ this.handler = null
+ util.destroy(ret, err)
+ }
+}
+
+function pipeline (opts, handler) {
+ try {
+ const pipelineHandler = new PipelineHandler(opts, handler)
+ this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
+ return pipelineHandler.ret
+ } catch (err) {
+ return new PassThrough().destroy(err)
+ }
+}
+
+module.exports = pipeline
+
+
+/***/ }),
+
+/***/ 5448:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Readable = __nccwpck_require__(3858)
+const {
+ InvalidArgumentError,
+ RequestAbortedError
+} = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { getResolveErrorBodyCallback } = __nccwpck_require__(7474)
+const { AsyncResource } = __nccwpck_require__(852)
+const { addSignal, removeSignal } = __nccwpck_require__(7032)
+
+class RequestHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
+
+ try {
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
+ throw new InvalidArgumentError('invalid highWaterMark')
+ }
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_REQUEST')
+ } catch (err) {
+ if (util.isStream(body)) {
+ util.destroy(body.on('error', util.nop), err)
+ }
+ throw err
+ }
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.callback = callback
+ this.res = null
+ this.abort = null
+ this.body = body
+ this.trailers = {}
+ this.context = null
+ this.onInfo = onInfo || null
+ this.throwOnError = throwOnError
+ this.highWaterMark = highWaterMark
+
+ if (util.isStream(body)) {
+ body.on('error', (err) => {
+ this.onError(err)
+ })
+ }
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
+
+ const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+ const contentType = parsedHeaders['content-type']
+ const body = new Readable({ resume, abort, contentType, highWaterMark })
+
+ this.callback = null
+ this.res = body
+ if (callback !== null) {
+ if (this.throwOnError && statusCode >= 400) {
+ this.runInAsyncScope(getResolveErrorBodyCallback, null,
+ { callback, body, contentType, statusCode, statusMessage, headers }
+ )
+ } else {
+ this.runInAsyncScope(callback, null, null, {
+ statusCode,
+ headers,
+ trailers: this.trailers,
+ opaque,
+ body,
+ context
+ })
+ }
+ }
+ }
+
+ onData (chunk) {
+ const { res } = this
+ return res.push(chunk)
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+
+ removeSignal(this)
+
+ util.parseHeaders(trailers, this.trailers)
+
+ res.push(null)
+ }
+
+ onError (err) {
+ const { res, callback, body, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ // TODO: Does this need queueMicrotask?
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+
+ if (res) {
+ this.res = null
+ // Ensure all queued handlers are invoked before destroying res.
+ queueMicrotask(() => {
+ util.destroy(res, err)
+ })
+ }
+
+ if (body) {
+ this.body = null
+ util.destroy(body, err)
+ }
+ }
+}
+
+function request (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ request.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ this.dispatch(opts, new RequestHandler(opts, callback))
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = request
+module.exports.RequestHandler = RequestHandler
+
+
+/***/ }),
+
+/***/ 5395:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { finished, PassThrough } = __nccwpck_require__(2781)
+const {
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError
+} = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { getResolveErrorBodyCallback } = __nccwpck_require__(7474)
+const { AsyncResource } = __nccwpck_require__(852)
+const { addSignal, removeSignal } = __nccwpck_require__(7032)
+
+class StreamHandler extends AsyncResource {
+ constructor (opts, factory, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
+
+ try {
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('invalid factory')
+ }
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ if (method === 'CONNECT') {
+ throw new InvalidArgumentError('invalid method')
+ }
+
+ if (onInfo && typeof onInfo !== 'function') {
+ throw new InvalidArgumentError('invalid onInfo callback')
+ }
+
+ super('UNDICI_STREAM')
+ } catch (err) {
+ if (util.isStream(body)) {
+ util.destroy(body.on('error', util.nop), err)
+ }
+ throw err
+ }
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.factory = factory
+ this.callback = callback
+ this.res = null
+ this.abort = null
+ this.context = null
+ this.trailers = null
+ this.body = body
+ this.onInfo = onInfo || null
+ this.throwOnError = throwOnError || false
+
+ if (util.isStream(body)) {
+ body.on('error', (err) => {
+ this.onError(err)
+ })
+ }
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = context
+ }
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const { factory, opaque, context, callback, responseHeaders } = this
+
+ const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+ if (statusCode < 200) {
+ if (this.onInfo) {
+ this.onInfo({ statusCode, headers })
+ }
+ return
+ }
+
+ this.factory = null
+
+ let res
+
+ if (this.throwOnError && statusCode >= 400) {
+ const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+ const contentType = parsedHeaders['content-type']
+ res = new PassThrough()
+
+ this.callback = null
+ this.runInAsyncScope(getResolveErrorBodyCallback, null,
+ { callback, body: res, contentType, statusCode, statusMessage, headers }
+ )
+ } else {
+ if (factory === null) {
+ return
+ }
+
+ res = this.runInAsyncScope(factory, null, {
+ statusCode,
+ headers,
+ opaque,
+ context
+ })
+
+ if (
+ !res ||
+ typeof res.write !== 'function' ||
+ typeof res.end !== 'function' ||
+ typeof res.on !== 'function'
+ ) {
+ throw new InvalidReturnValueError('expected Writable')
+ }
+
+ // TODO: Avoid finished. It registers an unnecessary amount of listeners.
+ finished(res, { readable: false }, (err) => {
+ const { callback, res, opaque, trailers, abort } = this
+
+ this.res = null
+ if (err || !res.readable) {
+ util.destroy(res, err)
+ }
+
+ this.callback = null
+ this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
+
+ if (err) {
+ abort()
+ }
+ })
+ }
+
+ res.on('drain', resume)
+
+ this.res = res
+
+ const needDrain = res.writableNeedDrain !== undefined
+ ? res.writableNeedDrain
+ : res._writableState && res._writableState.needDrain
+
+ return needDrain !== true
+ }
+
+ onData (chunk) {
+ const { res } = this
+
+ return res ? res.write(chunk) : true
+ }
+
+ onComplete (trailers) {
+ const { res } = this
+
+ removeSignal(this)
+
+ if (!res) {
+ return
+ }
+
+ this.trailers = util.parseHeaders(trailers)
+
+ res.end()
+ }
+
+ onError (err) {
+ const { res, callback, opaque, body } = this
+
+ removeSignal(this)
+
+ this.factory = null
+
+ if (res) {
+ this.res = null
+ util.destroy(res, err)
+ } else if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+
+ if (body) {
+ this.body = null
+ util.destroy(body, err)
+ }
+ }
+}
+
+function stream (opts, factory, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ stream.call(this, opts, factory, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ this.dispatch(opts, new StreamHandler(opts, factory, callback))
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = stream
+
+
+/***/ }),
+
+/***/ 6923:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045)
+const { AsyncResource } = __nccwpck_require__(852)
+const util = __nccwpck_require__(3983)
+const { addSignal, removeSignal } = __nccwpck_require__(7032)
+const assert = __nccwpck_require__(9491)
+
+class UpgradeHandler extends AsyncResource {
+ constructor (opts, callback) {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('invalid opts')
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ const { signal, opaque, responseHeaders } = opts
+
+ if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+ throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+ }
+
+ super('UNDICI_UPGRADE')
+
+ this.responseHeaders = responseHeaders || null
+ this.opaque = opaque || null
+ this.callback = callback
+ this.abort = null
+ this.context = null
+
+ addSignal(this, signal)
+ }
+
+ onConnect (abort, context) {
+ if (!this.callback) {
+ throw new RequestAbortedError()
+ }
+
+ this.abort = abort
+ this.context = null
+ }
+
+ onHeaders () {
+ throw new SocketError('bad upgrade', null)
+ }
+
+ onUpgrade (statusCode, rawHeaders, socket) {
+ const { callback, opaque, context } = this
+
+ assert.strictEqual(statusCode, 101)
+
+ removeSignal(this)
+
+ this.callback = null
+ const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ this.runInAsyncScope(callback, null, null, {
+ headers,
+ socket,
+ opaque,
+ context
+ })
+ }
+
+ onError (err) {
+ const { callback, opaque } = this
+
+ removeSignal(this)
+
+ if (callback) {
+ this.callback = null
+ queueMicrotask(() => {
+ this.runInAsyncScope(callback, null, err, { opaque })
+ })
+ }
+ }
+}
+
+function upgrade (opts, callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ upgrade.call(this, opts, (err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ try {
+ const upgradeHandler = new UpgradeHandler(opts, callback)
+ this.dispatch({
+ ...opts,
+ method: opts.method || 'GET',
+ upgrade: opts.protocol || 'Websocket'
+ }, upgradeHandler)
+ } catch (err) {
+ if (typeof callback !== 'function') {
+ throw err
+ }
+ const opaque = opts && opts.opaque
+ queueMicrotask(() => callback(err, { opaque }))
+ }
+}
+
+module.exports = upgrade
+
+
+/***/ }),
+
+/***/ 4059:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+module.exports.request = __nccwpck_require__(5448)
+module.exports.stream = __nccwpck_require__(5395)
+module.exports.pipeline = __nccwpck_require__(8752)
+module.exports.upgrade = __nccwpck_require__(6923)
+module.exports.connect = __nccwpck_require__(9744)
+
+
+/***/ }),
+
+/***/ 3858:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+// Ported from https://github.com/nodejs/undici/pull/907
+
+
+
+const assert = __nccwpck_require__(9491)
+const { Readable } = __nccwpck_require__(2781)
+const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3983)
+
+let Blob
+
+const kConsume = Symbol('kConsume')
+const kReading = Symbol('kReading')
+const kBody = Symbol('kBody')
+const kAbort = Symbol('abort')
+const kContentType = Symbol('kContentType')
+
+const noop = () => {}
+
+module.exports = class BodyReadable extends Readable {
+ constructor ({
+ resume,
+ abort,
+ contentType = '',
+ highWaterMark = 64 * 1024 // Same as nodejs fs streams.
+ }) {
+ super({
+ autoDestroy: true,
+ read: resume,
+ highWaterMark
+ })
+
+ this._readableState.dataEmitted = false
+
+ this[kAbort] = abort
+ this[kConsume] = null
+ this[kBody] = null
+ this[kContentType] = contentType
+
+ // Is stream being consumed through Readable API?
+ // This is an optimization so that we avoid checking
+ // for 'data' and 'readable' listeners in the hot path
+ // inside push().
+ this[kReading] = false
+ }
+
+ destroy (err) {
+ if (this.destroyed) {
+ // Node < 16
+ return this
+ }
+
+ if (!err && !this._readableState.endEmitted) {
+ err = new RequestAbortedError()
+ }
+
+ if (err) {
+ this[kAbort]()
+ }
+
+ return super.destroy(err)
+ }
+
+ emit (ev, ...args) {
+ if (ev === 'data') {
+ // Node < 16.7
+ this._readableState.dataEmitted = true
+ } else if (ev === 'error') {
+ // Node < 16
+ this._readableState.errorEmitted = true
+ }
+ return super.emit(ev, ...args)
+ }
+
+ on (ev, ...args) {
+ if (ev === 'data' || ev === 'readable') {
+ this[kReading] = true
+ }
+ return super.on(ev, ...args)
+ }
+
+ addListener (ev, ...args) {
+ return this.on(ev, ...args)
+ }
+
+ off (ev, ...args) {
+ const ret = super.off(ev, ...args)
+ if (ev === 'data' || ev === 'readable') {
+ this[kReading] = (
+ this.listenerCount('data') > 0 ||
+ this.listenerCount('readable') > 0
+ )
+ }
+ return ret
+ }
+
+ removeListener (ev, ...args) {
+ return this.off(ev, ...args)
+ }
+
+ push (chunk) {
+ if (this[kConsume] && chunk !== null && this.readableLength === 0) {
+ consumePush(this[kConsume], chunk)
+ return this[kReading] ? super.push(chunk) : true
+ }
+ return super.push(chunk)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-text
+ async text () {
+ return consume(this, 'text')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-json
+ async json () {
+ return consume(this, 'json')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-blob
+ async blob () {
+ return consume(this, 'blob')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-arraybuffer
+ async arrayBuffer () {
+ return consume(this, 'arrayBuffer')
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-formdata
+ async formData () {
+ // TODO: Implement.
+ throw new NotSupportedError()
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-bodyused
+ get bodyUsed () {
+ return util.isDisturbed(this)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-body-body
+ get body () {
+ if (!this[kBody]) {
+ this[kBody] = ReadableStreamFrom(this)
+ if (this[kConsume]) {
+ // TODO: Is this the best way to force a lock?
+ this[kBody].getReader() // Ensure stream is locked.
+ assert(this[kBody].locked)
+ }
+ }
+ return this[kBody]
+ }
+
+ dump (opts) {
+ let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
+ const signal = opts && opts.signal
+
+ if (signal) {
+ try {
+ if (typeof signal !== 'object' || !('aborted' in signal)) {
+ throw new InvalidArgumentError('signal must be an AbortSignal')
+ }
+ util.throwIfAborted(signal)
+ } catch (err) {
+ return Promise.reject(err)
+ }
+ }
+
+ if (this.closed) {
+ return Promise.resolve(null)
+ }
+
+ return new Promise((resolve, reject) => {
+ const signalListenerCleanup = signal
+ ? util.addAbortListener(signal, () => {
+ this.destroy()
+ })
+ : noop
+
+ this
+ .on('close', function () {
+ signalListenerCleanup()
+ if (signal && signal.aborted) {
+ reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
+ } else {
+ resolve(null)
+ }
+ })
+ .on('error', noop)
+ .on('data', function (chunk) {
+ limit -= chunk.length
+ if (limit <= 0) {
+ this.destroy()
+ }
+ })
+ .resume()
+ })
+ }
+}
+
+// https://streams.spec.whatwg.org/#readablestream-locked
+function isLocked (self) {
+ // Consume is an implicit lock.
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume]
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function isUnusable (self) {
+ return util.isDisturbed(self) || isLocked(self)
+}
+
+async function consume (stream, type) {
+ if (isUnusable(stream)) {
+ throw new TypeError('unusable')
+ }
+
+ assert(!stream[kConsume])
+
+ return new Promise((resolve, reject) => {
+ stream[kConsume] = {
+ type,
+ stream,
+ resolve,
+ reject,
+ length: 0,
+ body: []
+ }
+
+ stream
+ .on('error', function (err) {
+ consumeFinish(this[kConsume], err)
+ })
+ .on('close', function () {
+ if (this[kConsume].body !== null) {
+ consumeFinish(this[kConsume], new RequestAbortedError())
+ }
+ })
+
+ process.nextTick(consumeStart, stream[kConsume])
+ })
+}
+
+function consumeStart (consume) {
+ if (consume.body === null) {
+ return
+ }
+
+ const { _readableState: state } = consume.stream
+
+ for (const chunk of state.buffer) {
+ consumePush(consume, chunk)
+ }
+
+ if (state.endEmitted) {
+ consumeEnd(this[kConsume])
+ } else {
+ consume.stream.on('end', function () {
+ consumeEnd(this[kConsume])
+ })
+ }
+
+ consume.stream.resume()
+
+ while (consume.stream.read() != null) {
+ // Loop
+ }
+}
+
+function consumeEnd (consume) {
+ const { type, body, resolve, stream, length } = consume
+
+ try {
+ if (type === 'text') {
+ resolve(toUSVString(Buffer.concat(body)))
+ } else if (type === 'json') {
+ resolve(JSON.parse(Buffer.concat(body)))
+ } else if (type === 'arrayBuffer') {
+ const dst = new Uint8Array(length)
+
+ let pos = 0
+ for (const buf of body) {
+ dst.set(buf, pos)
+ pos += buf.byteLength
+ }
+
+ resolve(dst.buffer)
+ } else if (type === 'blob') {
+ if (!Blob) {
+ Blob = (__nccwpck_require__(4300).Blob)
+ }
+ resolve(new Blob(body, { type: stream[kContentType] }))
+ }
+
+ consumeFinish(consume)
+ } catch (err) {
+ stream.destroy(err)
+ }
+}
+
+function consumePush (consume, chunk) {
+ consume.length += chunk.length
+ consume.body.push(chunk)
+}
+
+function consumeFinish (consume, err) {
+ if (consume.body === null) {
+ return
+ }
+
+ if (err) {
+ consume.reject(err)
+ } else {
+ consume.resolve()
+ }
+
+ consume.type = null
+ consume.stream = null
+ consume.resolve = null
+ consume.reject = null
+ consume.length = 0
+ consume.body = null
+}
+
+
+/***/ }),
+
+/***/ 7474:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(9491)
+const {
+ ResponseStatusCodeError
+} = __nccwpck_require__(8045)
+const { toUSVString } = __nccwpck_require__(3983)
+
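+// Reads up to 128 KiB of an error response body and, when the content type is
+// JSON or text, attaches the decoded payload to the ResponseStatusCodeError
+// passed to the callback; larger or unparsable bodies are dropped.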
+async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
+ assert(body)
+
+ let chunks = []
+ let limit = 0
+
+ for await (const chunk of body) {
+ chunks.push(chunk)
+ limit += chunk.length
+ if (limit > 128 * 1024) {
+ chunks = null
+ break
+ }
+ }
+
+ if (statusCode === 204 || !contentType || !chunks) {
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+ return
+ }
+
+ try {
+ if (contentType.startsWith('application/json')) {
+ const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+ return
+ }
+
+ if (contentType.startsWith('text/')) {
+ const payload = toUSVString(Buffer.concat(chunks))
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+ return
+ }
+ } catch (err) {
+    // Fall back to the generic error below if the body cannot be parsed
+ }
+
+ process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+}
+
+module.exports = { getResolveErrorBodyCallback }
+
+
+/***/ }),
+
+/***/ 7931:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ BalancedPoolMissingUpstreamError,
+ InvalidArgumentError
+} = __nccwpck_require__(8045)
+const {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kRemoveClient,
+ kGetDispatcher
+} = __nccwpck_require__(3198)
+const Pool = __nccwpck_require__(4634)
+const { kUrl, kInterceptors } = __nccwpck_require__(2785)
+const { parseOrigin } = __nccwpck_require__(3983)
+const kFactory = Symbol('factory')
+
+const kOptions = Symbol('options')
+const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
+const kCurrentWeight = Symbol('kCurrentWeight')
+const kIndex = Symbol('kIndex')
+const kWeight = Symbol('kWeight')
+const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
+const kErrorPenalty = Symbol('kErrorPenalty')
+
+function getGreatestCommonDivisor (a, b) {
+ if (b === 0) return a
+ return getGreatestCommonDivisor(b, a % b)
+}
+
+function defaultFactory (origin, opts) {
+ return new Pool(origin, opts)
+}
+
+class BalancedPool extends PoolBase {
+ constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
+ super()
+
+ this[kOptions] = opts
+ this[kIndex] = -1
+ this[kCurrentWeight] = 0
+
+ this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
+ this[kErrorPenalty] = this[kOptions].errorPenalty || 15
+
+ if (!Array.isArray(upstreams)) {
+ upstreams = [upstreams]
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
+ ? opts.interceptors.BalancedPool
+ : []
+ this[kFactory] = factory
+
+ for (const upstream of upstreams) {
+ this.addUpstream(upstream)
+ }
+ this._updateBalancedPoolStats()
+ }
+
+ addUpstream (upstream) {
+ const upstreamOrigin = parseOrigin(upstream).origin
+
+ if (this[kClients].find((pool) => (
+ pool[kUrl].origin === upstreamOrigin &&
+ pool.closed !== true &&
+ pool.destroyed !== true
+ ))) {
+ return this
+ }
+ const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
+
+ this[kAddClient](pool)
+ pool.on('connect', () => {
+ pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
+ })
+
+ pool.on('connectionError', () => {
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+ this._updateBalancedPoolStats()
+ })
+
+ pool.on('disconnect', (...args) => {
+ const err = args[2]
+ if (err && err.code === 'UND_ERR_SOCKET') {
+ // decrease the weight of the pool.
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+ this._updateBalancedPoolStats()
+ }
+ })
+
+ for (const client of this[kClients]) {
+ client[kWeight] = this[kMaxWeightPerServer]
+ }
+
+ this._updateBalancedPoolStats()
+
+ return this
+ }
+
+ _updateBalancedPoolStats () {
+ this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
+ }
+
+ removeUpstream (upstream) {
+ const upstreamOrigin = parseOrigin(upstream).origin
+
+ const pool = this[kClients].find((pool) => (
+ pool[kUrl].origin === upstreamOrigin &&
+ pool.closed !== true &&
+ pool.destroyed !== true
+ ))
+
+ if (pool) {
+ this[kRemoveClient](pool)
+ }
+
+ return this
+ }
+
+ get upstreams () {
+ return this[kClients]
+ .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
+ .map((p) => p[kUrl].origin)
+ }
+
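+  // Interleaved weighted round-robin: every full pass over the clients lowers the
+  // current weight by the greatest common divisor of all pool weights, and the
+  // first non-draining pool whose weight is at least the current weight is chosen.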
+ [kGetDispatcher] () {
+    // We validate that the number of pools is greater than 0,
+ // otherwise we would have to wait until an upstream
+ // is added, which might never happen.
+ if (this[kClients].length === 0) {
+ throw new BalancedPoolMissingUpstreamError()
+ }
+
+ const dispatcher = this[kClients].find(dispatcher => (
+ !dispatcher[kNeedDrain] &&
+ dispatcher.closed !== true &&
+ dispatcher.destroyed !== true
+ ))
+
+ if (!dispatcher) {
+ return
+ }
+
+ const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
+
+ if (allClientsBusy) {
+ return
+ }
+
+ let counter = 0
+
+ let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
+
+ while (counter++ < this[kClients].length) {
+ this[kIndex] = (this[kIndex] + 1) % this[kClients].length
+ const pool = this[kClients][this[kIndex]]
+
+ // find pool index with the largest weight
+ if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
+ maxWeightIndex = this[kIndex]
+ }
+
+ // decrease the current weight every `this[kClients].length`.
+ if (this[kIndex] === 0) {
+ // Set the current weight to the next lower weight.
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
+
+ if (this[kCurrentWeight] <= 0) {
+ this[kCurrentWeight] = this[kMaxWeightPerServer]
+ }
+ }
+ if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
+ return pool
+ }
+ }
+
+ this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
+ this[kIndex] = maxWeightIndex
+ return this[kClients][maxWeightIndex]
+ }
+}
+
+module.exports = BalancedPool
+
+
+/***/ }),
+
+/***/ 6101:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { kConstruct } = __nccwpck_require__(9174)
+const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(2396)
+const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3983)
+const { kHeadersList } = __nccwpck_require__(2785)
+const { webidl } = __nccwpck_require__(1744)
+const { Response, cloneResponse } = __nccwpck_require__(7823)
+const { Request } = __nccwpck_require__(8359)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861)
+const { fetching } = __nccwpck_require__(4881)
+const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2538)
+const assert = __nccwpck_require__(9491)
+const { getGlobalDispatcher } = __nccwpck_require__(1892)
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
+ * @typedef {Object} CacheBatchOperation
+ * @property {'delete' | 'put'} type
+ * @property {any} request
+ * @property {any} response
+ * @property {import('../../types/cache').CacheQueryOptions} options
+ */
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
+ * @typedef {[any, any][]} requestResponseList
+ */
+
+class Cache {
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
+ * @type {requestResponseList}
+ */
+ #relevantRequestResponseList
+
+ constructor () {
+ if (arguments[0] !== kConstruct) {
+ webidl.illegalConstructor()
+ }
+
+ this.#relevantRequestResponseList = arguments[1]
+ }
+
+ async match (request, options = {}) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
+
+ request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ const p = await this.matchAll(request, options)
+
+ if (p.length === 0) {
+ return
+ }
+
+ return p[0]
+ }
+
+ async matchAll (request = undefined, options = {}) {
+ webidl.brandCheck(this, Cache)
+
+ if (request !== undefined) request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ // 1.
+ let r = null
+
+ // 2.
+ if (request !== undefined) {
+ if (request instanceof Request) {
+ // 2.1.1
+ r = request[kState]
+
+ // 2.1.2
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return []
+ }
+ } else if (typeof request === 'string') {
+ // 2.2.1
+ r = new Request(request)[kState]
+ }
+ }
+
+ // 5.
+ // 5.1
+ const responses = []
+
+ // 5.2
+ if (request === undefined) {
+ // 5.2.1
+ for (const requestResponse of this.#relevantRequestResponseList) {
+ responses.push(requestResponse[1])
+ }
+ } else { // 5.3
+ // 5.3.1
+ const requestResponses = this.#queryCache(r, options)
+
+ // 5.3.2
+ for (const requestResponse of requestResponses) {
+ responses.push(requestResponse[1])
+ }
+ }
+
+ // 5.4
+    // We don't implement CORS, so we don't need to loop over the responses, yay!
+
+ // 5.5.1
+ const responseList = []
+
+ // 5.5.2
+ for (const response of responses) {
+ // 5.5.2.1
+ const responseObject = new Response(response.body?.source ?? null)
+ const body = responseObject[kState].body
+ responseObject[kState] = response
+ responseObject[kState].body = body
+ responseObject[kHeaders][kHeadersList] = response.headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+
+ responseList.push(responseObject)
+ }
+
+ // 6.
+ return Object.freeze(responseList)
+ }
+
+ async add (request) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
+
+ request = webidl.converters.RequestInfo(request)
+
+ // 1.
+ const requests = [request]
+
+ // 2.
+ const responseArrayPromise = this.addAll(requests)
+
+ // 3.
+ return await responseArrayPromise
+ }
+
+ async addAll (requests) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
+
+    requests = webidl.converters['sequence<RequestInfo>'](requests)
+
+ // 1.
+ const responsePromises = []
+
+ // 2.
+ const requestList = []
+
+ // 3.
+ for (const request of requests) {
+ if (typeof request === 'string') {
+ continue
+ }
+
+ // 3.1
+ const r = request[kState]
+
+ // 3.2
+ if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Expected http/s scheme when method is not GET.'
+ })
+ }
+ }
+
+ // 4.
+    /** @type {ReturnType<typeof fetching>[]} */
+ const fetchControllers = []
+
+ // 5.
+ for (const request of requests) {
+ // 5.1
+ const r = new Request(request)[kState]
+
+ // 5.2
+ if (!urlIsHttpHttpsScheme(r.url)) {
+ throw webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Expected http/s scheme.'
+ })
+ }
+
+ // 5.4
+ r.initiator = 'fetch'
+ r.destination = 'subresource'
+
+ // 5.5
+ requestList.push(r)
+
+ // 5.6
+ const responsePromise = createDeferredPromise()
+
+ // 5.7
+ fetchControllers.push(fetching({
+ request: r,
+ dispatcher: getGlobalDispatcher(),
+ processResponse (response) {
+ // 1.
+ if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
+ responsePromise.reject(webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'Received an invalid status code or the request failed.'
+ }))
+ } else if (response.headersList.contains('vary')) { // 2.
+ // 2.1
+ const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+ // 2.2
+ for (const fieldValue of fieldValues) {
+ // 2.2.1
+ if (fieldValue === '*') {
+ responsePromise.reject(webidl.errors.exception({
+ header: 'Cache.addAll',
+ message: 'invalid vary field value'
+ }))
+
+ for (const controller of fetchControllers) {
+ controller.abort()
+ }
+
+ return
+ }
+ }
+ }
+ },
+ processResponseEndOfBody (response) {
+ // 1.
+ if (response.aborted) {
+ responsePromise.reject(new DOMException('aborted', 'AbortError'))
+ return
+ }
+
+ // 2.
+ responsePromise.resolve(response)
+ }
+ }))
+
+ // 5.8
+ responsePromises.push(responsePromise.promise)
+ }
+
+ // 6.
+ const p = Promise.all(responsePromises)
+
+ // 7.
+ const responses = await p
+
+ // 7.1
+ const operations = []
+
+ // 7.2
+ let index = 0
+
+ // 7.3
+ for (const response of responses) {
+ // 7.3.1
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'put', // 7.3.2
+ request: requestList[index], // 7.3.3
+ response // 7.3.4
+ }
+
+ operations.push(operation) // 7.3.5
+
+ index++ // 7.3.6
+ }
+
+ // 7.5
+ const cacheJobPromise = createDeferredPromise()
+
+ // 7.6.1
+ let errorData = null
+
+ // 7.6.2
+ try {
+ this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ // 7.6.3
+ queueMicrotask(() => {
+ // 7.6.3.1
+ if (errorData === null) {
+ cacheJobPromise.resolve(undefined)
+ } else {
+ // 7.6.3.2
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ // 7.7
+ return cacheJobPromise.promise
+ }
+
+ async put (request, response) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
+
+ request = webidl.converters.RequestInfo(request)
+ response = webidl.converters.Response(response)
+
+ // 1.
+ let innerRequest = null
+
+ // 2.
+ if (request instanceof Request) {
+ innerRequest = request[kState]
+ } else { // 3.
+ innerRequest = new Request(request)[kState]
+ }
+
+ // 4.
+ if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Expected an http/s scheme when method is not GET'
+ })
+ }
+
+ // 5.
+ const innerResponse = response[kState]
+
+ // 6.
+ if (innerResponse.status === 206) {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Got 206 status'
+ })
+ }
+
+ // 7.
+ if (innerResponse.headersList.contains('vary')) {
+ // 7.1.
+ const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
+
+ // 7.2.
+ for (const fieldValue of fieldValues) {
+ // 7.2.1
+ if (fieldValue === '*') {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Got * vary field value'
+ })
+ }
+ }
+ }
+
+ // 8.
+ if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
+ throw webidl.errors.exception({
+ header: 'Cache.put',
+ message: 'Response body is locked or disturbed'
+ })
+ }
+
+ // 9.
+ const clonedResponse = cloneResponse(innerResponse)
+
+ // 10.
+ const bodyReadPromise = createDeferredPromise()
+
+ // 11.
+ if (innerResponse.body != null) {
+ // 11.1
+ const stream = innerResponse.body.stream
+
+ // 11.2
+ const reader = stream.getReader()
+
+ // 11.3
+ readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
+ } else {
+ bodyReadPromise.resolve(undefined)
+ }
+
+ // 12.
+ /** @type {CacheBatchOperation[]} */
+ const operations = []
+
+ // 13.
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'put', // 14.
+ request: innerRequest, // 15.
+ response: clonedResponse // 16.
+ }
+
+ // 17.
+ operations.push(operation)
+
+ // 19.
+ const bytes = await bodyReadPromise.promise
+
+ if (clonedResponse.body != null) {
+ clonedResponse.body.source = bytes
+ }
+
+ // 19.1
+ const cacheJobPromise = createDeferredPromise()
+
+ // 19.2.1
+ let errorData = null
+
+ // 19.2.2
+ try {
+ this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ // 19.2.3
+ queueMicrotask(() => {
+ // 19.2.3.1
+ if (errorData === null) {
+ cacheJobPromise.resolve()
+ } else { // 19.2.3.2
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ return cacheJobPromise.promise
+ }
+
+ async delete (request, options = {}) {
+ webidl.brandCheck(this, Cache)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
+
+ request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ /**
+ * @type {Request}
+ */
+ let r = null
+
+ if (request instanceof Request) {
+ r = request[kState]
+
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return false
+ }
+ } else {
+ assert(typeof request === 'string')
+
+ r = new Request(request)[kState]
+ }
+
+ /** @type {CacheBatchOperation[]} */
+ const operations = []
+
+ /** @type {CacheBatchOperation} */
+ const operation = {
+ type: 'delete',
+ request: r,
+ options
+ }
+
+ operations.push(operation)
+
+ const cacheJobPromise = createDeferredPromise()
+
+ let errorData = null
+ let requestResponses
+
+ try {
+ requestResponses = this.#batchCacheOperations(operations)
+ } catch (e) {
+ errorData = e
+ }
+
+ queueMicrotask(() => {
+ if (errorData === null) {
+ cacheJobPromise.resolve(!!requestResponses?.length)
+ } else {
+ cacheJobPromise.reject(errorData)
+ }
+ })
+
+ return cacheJobPromise.promise
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
+ * @param {any} request
+ * @param {import('../../types/cache').CacheQueryOptions} options
+ * @returns {readonly Request[]}
+ */
+ async keys (request = undefined, options = {}) {
+ webidl.brandCheck(this, Cache)
+
+ if (request !== undefined) request = webidl.converters.RequestInfo(request)
+ options = webidl.converters.CacheQueryOptions(options)
+
+ // 1.
+ let r = null
+
+ // 2.
+ if (request !== undefined) {
+ // 2.1
+ if (request instanceof Request) {
+ // 2.1.1
+ r = request[kState]
+
+ // 2.1.2
+ if (r.method !== 'GET' && !options.ignoreMethod) {
+ return []
+ }
+ } else if (typeof request === 'string') { // 2.2
+ r = new Request(request)[kState]
+ }
+ }
+
+ // 4.
+ const promise = createDeferredPromise()
+
+ // 5.
+ // 5.1
+ const requests = []
+
+ // 5.2
+ if (request === undefined) {
+ // 5.2.1
+ for (const requestResponse of this.#relevantRequestResponseList) {
+ // 5.2.1.1
+ requests.push(requestResponse[0])
+ }
+ } else { // 5.3
+ // 5.3.1
+ const requestResponses = this.#queryCache(r, options)
+
+ // 5.3.2
+ for (const requestResponse of requestResponses) {
+ // 5.3.2.1
+ requests.push(requestResponse[0])
+ }
+ }
+
+ // 5.4
+ queueMicrotask(() => {
+ // 5.4.1
+ const requestList = []
+
+ // 5.4.2
+ for (const request of requests) {
+ const requestObject = new Request('https://a')
+ requestObject[kState] = request
+ requestObject[kHeaders][kHeadersList] = request.headersList
+ requestObject[kHeaders][kGuard] = 'immutable'
+ requestObject[kRealm] = request.client
+
+ // 5.4.2.1
+ requestList.push(requestObject)
+ }
+
+ // 5.4.3
+ promise.resolve(Object.freeze(requestList))
+ })
+
+ return promise.promise
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
+ * @param {CacheBatchOperation[]} operations
+ * @returns {requestResponseList}
+ */
+ #batchCacheOperations (operations) {
+ // 1.
+ const cache = this.#relevantRequestResponseList
+
+ // 2.
+ const backupCache = [...cache]
+
+ // 3.
+ const addedItems = []
+
+ // 4.1
+ const resultList = []
+
+ try {
+ // 4.2
+ for (const operation of operations) {
+ // 4.2.1
+ if (operation.type !== 'delete' && operation.type !== 'put') {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'operation type does not match "delete" or "put"'
+ })
+ }
+
+ // 4.2.2
+ if (operation.type === 'delete' && operation.response != null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'delete operation should not have an associated response'
+ })
+ }
+
+ // 4.2.3
+ if (this.#queryCache(operation.request, operation.options, addedItems).length) {
+ throw new DOMException('???', 'InvalidStateError')
+ }
+
+ // 4.2.4
+ let requestResponses
+
+ // 4.2.5
+ if (operation.type === 'delete') {
+ // 4.2.5.1
+ requestResponses = this.#queryCache(operation.request, operation.options)
+
+ // TODO: the spec is wrong, this is needed to pass WPTs
+ if (requestResponses.length === 0) {
+ return []
+ }
+
+ // 4.2.5.2
+ for (const requestResponse of requestResponses) {
+ const idx = cache.indexOf(requestResponse)
+ assert(idx !== -1)
+
+ // 4.2.5.2.1
+ cache.splice(idx, 1)
+ }
+ } else if (operation.type === 'put') { // 4.2.6
+ // 4.2.6.1
+ if (operation.response == null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'put operation should have an associated response'
+ })
+ }
+
+ // 4.2.6.2
+ const r = operation.request
+
+ // 4.2.6.3
+ if (!urlIsHttpHttpsScheme(r.url)) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'expected http or https scheme'
+ })
+ }
+
+ // 4.2.6.4
+ if (r.method !== 'GET') {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'not get method'
+ })
+ }
+
+ // 4.2.6.5
+ if (operation.options != null) {
+ throw webidl.errors.exception({
+ header: 'Cache.#batchCacheOperations',
+ message: 'options must not be defined'
+ })
+ }
+
+ // 4.2.6.6
+ requestResponses = this.#queryCache(operation.request)
+
+ // 4.2.6.7
+ for (const requestResponse of requestResponses) {
+ const idx = cache.indexOf(requestResponse)
+ assert(idx !== -1)
+
+ // 4.2.6.7.1
+ cache.splice(idx, 1)
+ }
+
+ // 4.2.6.8
+ cache.push([operation.request, operation.response])
+
+ // 4.2.6.10
+ addedItems.push([operation.request, operation.response])
+ }
+
+ // 4.2.7
+ resultList.push([operation.request, operation.response])
+ }
+
+ // 4.3
+ return resultList
+ } catch (e) { // 5.
+ // 5.1
+ this.#relevantRequestResponseList.length = 0
+
+ // 5.2
+ this.#relevantRequestResponseList = backupCache
+
+ // 5.3
+ throw e
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#query-cache
+ * @param {any} requestQuery
+ * @param {import('../../types/cache').CacheQueryOptions} options
+ * @param {requestResponseList} targetStorage
+ * @returns {requestResponseList}
+ */
+ #queryCache (requestQuery, options, targetStorage) {
+ /** @type {requestResponseList} */
+ const resultList = []
+
+ const storage = targetStorage ?? this.#relevantRequestResponseList
+
+ for (const requestResponse of storage) {
+ const [cachedRequest, cachedResponse] = requestResponse
+ if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
+ resultList.push(requestResponse)
+ }
+ }
+
+ return resultList
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
+ * @param {any} requestQuery
+ * @param {any} request
+ * @param {any | null} response
+ * @param {import('../../types/cache').CacheQueryOptions | undefined} options
+ * @returns {boolean}
+ */
+ #requestMatchesCachedItem (requestQuery, request, response = null, options) {
+ // if (options?.ignoreMethod === false && request.method === 'GET') {
+ // return false
+ // }
+
+ const queryURL = new URL(requestQuery.url)
+
+ const cachedURL = new URL(request.url)
+
+ if (options?.ignoreSearch) {
+ cachedURL.search = ''
+
+ queryURL.search = ''
+ }
+
+ if (!urlEquals(queryURL, cachedURL, true)) {
+ return false
+ }
+
+ if (
+ response == null ||
+ options?.ignoreVary ||
+ !response.headersList.contains('vary')
+ ) {
+ return true
+ }
+
+ const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+ for (const fieldValue of fieldValues) {
+ if (fieldValue === '*') {
+ return false
+ }
+
+ const requestValue = request.headersList.get(fieldValue)
+ const queryValue = requestQuery.headersList.get(fieldValue)
+
+ // If one has the header and the other doesn't, or one has
+ // a different value than the other, return false
+ if (requestValue !== queryValue) {
+ return false
+ }
+ }
+
+ return true
+ }
+}
+
+Object.defineProperties(Cache.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'Cache',
+ configurable: true
+ },
+ match: kEnumerableProperty,
+ matchAll: kEnumerableProperty,
+ add: kEnumerableProperty,
+ addAll: kEnumerableProperty,
+ put: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ keys: kEnumerableProperty
+})
+
+const cacheQueryOptionConverters = [
+ {
+ key: 'ignoreSearch',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'ignoreMethod',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'ignoreVary',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+]
+
+webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
+
+webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
+ ...cacheQueryOptionConverters,
+ {
+ key: 'cacheName',
+ converter: webidl.converters.DOMString
+ }
+])
+
+webidl.converters.Response = webidl.interfaceConverter(Response)
+
+webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
+ webidl.converters.RequestInfo
+)
+
+module.exports = {
+ Cache
+}
+
+
+/***/ }),
+
+/***/ 7907:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { kConstruct } = __nccwpck_require__(9174)
+const { Cache } = __nccwpck_require__(6101)
+const { webidl } = __nccwpck_require__(1744)
+const { kEnumerableProperty } = __nccwpck_require__(3983)
+
+class CacheStorage {
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
+ * @type {Map}
+   */
+  #caches = new Map()
+
+  constructor () {
+    if (arguments[0] !== kConstruct) {
+      webidl.illegalConstructor()
+    }
+  }
+
+  async match (request, options = {}) {
+    webidl.brandCheck(this, CacheStorage)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
+
+    request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.MultiCacheQueryOptions(options)
+
+    if (options.cacheName != null) {
+      // Only look in the named cache.
+      if (this.#caches.has(options.cacheName)) {
+        const cacheList = this.#caches.get(options.cacheName)
+        const cache = new Cache(kConstruct, cacheList)
+
+        return await cache.match(request, options)
+      }
+    } else {
+      // Search every cache and return the first match.
+      for (const cacheList of this.#caches.values()) {
+        const cache = new Cache(kConstruct, cacheList)
+
+        const response = await cache.match(request, options)
+
+        if (response !== undefined) {
+          return response
+        }
+      }
+    }
+  }
+
+  /**
+   * @param {string} cacheName
+   * @returns {Promise<boolean>}
+   */
+  async has (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ // 2.1.1
+ // 2.2
+ return this.#caches.has(cacheName)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
+ * @param {string} cacheName
+   * @returns {Promise<Cache>}
+ */
+ async open (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ // 2.1
+ if (this.#caches.has(cacheName)) {
+      // Each call returns a fresh Cache instance wrapping the same backing
+      // list, so await caches.open('v1') !== await caches.open('v1')
+
+ // 2.1.1
+ const cache = this.#caches.get(cacheName)
+
+ // 2.1.1.1
+ return new Cache(kConstruct, cache)
+ }
+
+ // 2.2
+ const cache = []
+
+ // 2.3
+ this.#caches.set(cacheName, cache)
+
+ // 2.4
+ return new Cache(kConstruct, cache)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
+ * @param {string} cacheName
+   * @returns {Promise<boolean>}
+ */
+ async delete (cacheName) {
+ webidl.brandCheck(this, CacheStorage)
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
+
+ cacheName = webidl.converters.DOMString(cacheName)
+
+ return this.#caches.delete(cacheName)
+ }
+
+ /**
+ * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
+ * @returns {string[]}
+ */
+ async keys () {
+ webidl.brandCheck(this, CacheStorage)
+
+ // 2.1
+ const keys = this.#caches.keys()
+
+ // 2.2
+ return [...keys]
+ }
+}
+
+Object.defineProperties(CacheStorage.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'CacheStorage',
+ configurable: true
+ },
+ match: kEnumerableProperty,
+ has: kEnumerableProperty,
+ open: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ keys: kEnumerableProperty
+})
+
+module.exports = {
+ CacheStorage
+}
+
+
+/***/ }),
+
+/***/ 9174:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+module.exports = {
+ kConstruct: (__nccwpck_require__(2785).kConstruct)
+}
+
+
+/***/ }),
+
+/***/ 2396:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const assert = __nccwpck_require__(9491)
+const { URLSerializer } = __nccwpck_require__(685)
+const { isValidHeaderName } = __nccwpck_require__(2538)
+
+/**
+ * @see https://url.spec.whatwg.org/#concept-url-equals
+ * @param {URL} A
+ * @param {URL} B
+ * @param {boolean | undefined} excludeFragment
+ * @returns {boolean}
+ */
+function urlEquals (A, B, excludeFragment = false) {
+ const serializedA = URLSerializer(A, excludeFragment)
+
+ const serializedB = URLSerializer(B, excludeFragment)
+
+ return serializedA === serializedB
+}
+
+/**
+ * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
+ * @param {string} header
+ */
+function fieldValues (header) {
+ assert(header !== null)
+
+ const values = []
+
+ for (let value of header.split(',')) {
+ value = value.trim()
+
+ if (!value.length) {
+ continue
+ } else if (!isValidHeaderName(value)) {
+ continue
+ }
+
+ values.push(value)
+ }
+
+ return values
+}
+
+module.exports = {
+ urlEquals,
+ fieldValues
+}
+
+
+/***/ }),
+
+/***/ 3598:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+// @ts-check
+
+
+
+/* global WebAssembly */
+
+const assert = __nccwpck_require__(9491)
+const net = __nccwpck_require__(1808)
+const http = __nccwpck_require__(3685)
+const { pipeline } = __nccwpck_require__(2781)
+const util = __nccwpck_require__(3983)
+const timers = __nccwpck_require__(9459)
+const Request = __nccwpck_require__(2905)
+const DispatcherBase = __nccwpck_require__(4839)
+const {
+ RequestContentLengthMismatchError,
+ ResponseContentLengthMismatchError,
+ InvalidArgumentError,
+ RequestAbortedError,
+ HeadersTimeoutError,
+ HeadersOverflowError,
+ SocketError,
+ InformationalError,
+ BodyTimeoutError,
+ HTTPParserError,
+ ResponseExceededMaxSizeError,
+ ClientDestroyedError
+} = __nccwpck_require__(8045)
+const buildConnector = __nccwpck_require__(2067)
+const {
+ kUrl,
+ kReset,
+ kServerName,
+ kClient,
+ kBusy,
+ kParser,
+ kConnect,
+ kBlocking,
+ kResuming,
+ kRunning,
+ kPending,
+ kSize,
+ kWriting,
+ kQueue,
+ kConnected,
+ kConnecting,
+ kNeedDrain,
+ kNoRef,
+ kKeepAliveDefaultTimeout,
+ kHostHeader,
+ kPendingIdx,
+ kRunningIdx,
+ kError,
+ kPipelining,
+ kSocket,
+ kKeepAliveTimeoutValue,
+ kMaxHeadersSize,
+ kKeepAliveMaxTimeout,
+ kKeepAliveTimeoutThreshold,
+ kHeadersTimeout,
+ kBodyTimeout,
+ kStrictContentLength,
+ kConnector,
+ kMaxRedirections,
+ kMaxRequests,
+ kCounter,
+ kClose,
+ kDestroy,
+ kDispatch,
+ kInterceptors,
+ kLocalAddress,
+ kMaxResponseSize,
+ kHTTPConnVersion,
+ // HTTP2
+ kHost,
+ kHTTP2Session,
+ kHTTP2SessionState,
+ kHTTP2BuildRequest,
+ kHTTP2CopyHeaders,
+ kHTTP1BuildRequest
+} = __nccwpck_require__(2785)
+
+/** @type {import('http2')} */
+let http2
+try {
+ http2 = __nccwpck_require__(5158)
+} catch {
+ // @ts-ignore
+ http2 = { constants: {} }
+}
+
+const {
+ constants: {
+ HTTP2_HEADER_AUTHORITY,
+ HTTP2_HEADER_METHOD,
+ HTTP2_HEADER_PATH,
+ HTTP2_HEADER_SCHEME,
+ HTTP2_HEADER_CONTENT_LENGTH,
+ HTTP2_HEADER_EXPECT,
+ HTTP2_HEADER_STATUS
+ }
+} = http2
+
+// Experimental
+let h2ExperimentalWarned = false
+
+const FastBuffer = Buffer[Symbol.species]
+
+const kClosedResolve = Symbol('kClosedResolve')
+
+const channels = {}
+
+try {
+ const diagnosticsChannel = __nccwpck_require__(7643)
+ channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')
+ channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')
+ channels.connectError = diagnosticsChannel.channel('undici:client:connectError')
+ channels.connected = diagnosticsChannel.channel('undici:client:connected')
+} catch {
+ channels.sendHeaders = { hasSubscribers: false }
+ channels.beforeConnect = { hasSubscribers: false }
+ channels.connectError = { hasSubscribers: false }
+ channels.connected = { hasSubscribers: false }
+}
+
+/**
+ * @type {import('../types/client').default}
+ */
+class Client extends DispatcherBase {
+ /**
+ *
+ * @param {string|URL} url
+ * @param {import('../types/client').Client.Options} options
+ */
+ constructor (url, {
+ interceptors,
+ maxHeaderSize,
+ headersTimeout,
+ socketTimeout,
+ requestTimeout,
+ connectTimeout,
+ bodyTimeout,
+ idleTimeout,
+ keepAlive,
+ keepAliveTimeout,
+ maxKeepAliveTimeout,
+ keepAliveMaxTimeout,
+ keepAliveTimeoutThreshold,
+ socketPath,
+ pipelining,
+ tls,
+ strictContentLength,
+ maxCachedSessions,
+ maxRedirections,
+ connect,
+ maxRequestsPerClient,
+ localAddress,
+ maxResponseSize,
+ autoSelectFamily,
+ autoSelectFamilyAttemptTimeout,
+ // h2
+ allowH2,
+ maxConcurrentStreams
+ } = {}) {
+ super()
+
+ if (keepAlive !== undefined) {
+ throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
+ }
+
+ if (socketTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
+ }
+
+ if (requestTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
+ }
+
+ if (idleTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
+ }
+
+ if (maxKeepAliveTimeout !== undefined) {
+ throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
+ }
+
+ if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
+ throw new InvalidArgumentError('invalid maxHeaderSize')
+ }
+
+ if (socketPath != null && typeof socketPath !== 'string') {
+ throw new InvalidArgumentError('invalid socketPath')
+ }
+
+ if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
+ throw new InvalidArgumentError('invalid connectTimeout')
+ }
+
+ if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
+ throw new InvalidArgumentError('invalid keepAliveTimeout')
+ }
+
+ if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
+ throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
+ }
+
+ if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
+ throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
+ }
+
+ if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
+ throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
+ }
+
+ if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
+ throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
+ throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
+ }
+
+ if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
+ throw new InvalidArgumentError('localAddress must be valid string IP address')
+ }
+
+ if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
+ throw new InvalidArgumentError('maxResponseSize must be a positive number')
+ }
+
+ if (
+ autoSelectFamilyAttemptTimeout != null &&
+ (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
+ ) {
+ throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
+ }
+
+ // h2
+ if (allowH2 != null && typeof allowH2 !== 'boolean') {
+ throw new InvalidArgumentError('allowH2 must be a valid boolean value')
+ }
+
+ if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
+      throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
+ }
+
+ if (typeof connect !== 'function') {
+ connect = buildConnector({
+ ...tls,
+ maxCachedSessions,
+ allowH2,
+ socketPath,
+ timeout: connectTimeout,
+ ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+ ...connect
+ })
+ }
+
+ this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)
+ ? interceptors.Client
+ : [createRedirectInterceptor({ maxRedirections })]
+ this[kUrl] = util.parseOrigin(url)
+ this[kConnector] = connect
+ this[kSocket] = null
+ this[kPipelining] = pipelining != null ? pipelining : 1
+ this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
+ this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
+ this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
+ this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
+ this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
+ this[kServerName] = null
+ this[kLocalAddress] = localAddress != null ? localAddress : null
+    this[kResuming] = 0 // 0: idle, 1: scheduled, 2: resuming
+    this[kNeedDrain] = 0 // 0: idle, 1: scheduled, 2: resuming
+ this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
+ this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
+ this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
+ this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
+ this[kMaxRedirections] = maxRedirections
+ this[kMaxRequests] = maxRequestsPerClient
+ this[kClosedResolve] = null
+ this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
+ this[kHTTPConnVersion] = 'h1'
+
+ // HTTP/2
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = !allowH2
+ ? null
+ : {
+ // streams: null, // Fixed queue of streams - For future support of `push`
+        openStreams: 0, // Keep track of them to decide whether or not to unref the session
+ maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
+ }
+ this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`
+
+ // kQueue is built up of 3 sections separated by
+ // the kRunningIdx and kPendingIdx indices.
+ // | complete | running | pending |
+ // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
+ // kRunningIdx points to the first running element.
+ // kPendingIdx points to the first pending element.
+ // This implements a fast queue with an amortized
+ // time of O(1).
+
+ this[kQueue] = []
+ this[kRunningIdx] = 0
+ this[kPendingIdx] = 0
+ }
+
+ get pipelining () {
+ return this[kPipelining]
+ }
+
+ set pipelining (value) {
+ this[kPipelining] = value
+ resume(this, true)
+ }
+
+ get [kPending] () {
+ return this[kQueue].length - this[kPendingIdx]
+ }
+
+ get [kRunning] () {
+ return this[kPendingIdx] - this[kRunningIdx]
+ }
+
+ get [kSize] () {
+ return this[kQueue].length - this[kRunningIdx]
+ }
+
+ get [kConnected] () {
+ return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed
+ }
+
+ get [kBusy] () {
+ const socket = this[kSocket]
+ return (
+ (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||
+ (this[kSize] >= (this[kPipelining] || 1)) ||
+ this[kPending] > 0
+ )
+ }
+
+ /* istanbul ignore: only used for test */
+ [kConnect] (cb) {
+ connect(this)
+ this.once('connect', cb)
+ }
+
+ [kDispatch] (opts, handler) {
+ const origin = opts.origin || this[kUrl].origin
+
+ const request = this[kHTTPConnVersion] === 'h2'
+ ? Request[kHTTP2BuildRequest](origin, opts, handler)
+ : Request[kHTTP1BuildRequest](origin, opts, handler)
+
+ this[kQueue].push(request)
+ if (this[kResuming]) {
+ // Do nothing.
+ } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
+ // Wait a tick in case stream/iterator is ended in the same tick.
+ this[kResuming] = 1
+ process.nextTick(resume, this)
+ } else {
+ resume(this, true)
+ }
+
+ if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
+ this[kNeedDrain] = 2
+ }
+
+ return this[kNeedDrain] < 2
+ }
+
+ async [kClose] () {
+ // TODO: for H2 we need to gracefully flush the remaining enqueued
+ // request and close each stream.
+ return new Promise((resolve) => {
+ if (!this[kSize]) {
+ resolve(null)
+ } else {
+ this[kClosedResolve] = resolve
+ }
+ })
+ }
+
+ async [kDestroy] (err) {
+ return new Promise((resolve) => {
+ const requests = this[kQueue].splice(this[kPendingIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(this, request, err)
+ }
+
+ const callback = () => {
+ if (this[kClosedResolve]) {
+ // TODO (fix): Should we error here with ClientDestroyedError?
+ this[kClosedResolve]()
+ this[kClosedResolve] = null
+ }
+ resolve()
+ }
+
+ if (this[kHTTP2Session] != null) {
+ util.destroy(this[kHTTP2Session], err)
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = null
+ }
+
+ if (!this[kSocket]) {
+ queueMicrotask(callback)
+ } else {
+ util.destroy(this[kSocket].on('close', callback), err)
+ }
+
+ resume(this)
+ })
+ }
+}
+
+function onHttp2SessionError (err) {
+ assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+ this[kSocket][kError] = err
+
+ onError(this[kClient], err)
+}
+
+function onHttp2FrameError (type, code, id) {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+
+ if (id === 0) {
+ this[kSocket][kError] = err
+ onError(this[kClient], err)
+ }
+}
+
+function onHttp2SessionEnd () {
+ util.destroy(this, new SocketError('other side closed'))
+ util.destroy(this[kSocket], new SocketError('other side closed'))
+}
+
+function onHTTP2GoAway (code) {
+ const client = this[kClient]
+ const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
+ client[kSocket] = null
+ client[kHTTP2Session] = null
+
+ if (client.destroyed) {
+ assert(this[kPending] === 0)
+
+ // Fail entire queue.
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(this, request, err)
+ }
+ } else if (client[kRunning] > 0) {
+ // Fail head of pipeline.
+ const request = client[kQueue][client[kRunningIdx]]
+ client[kQueue][client[kRunningIdx]++] = null
+
+ errorRequest(client, request, err)
+ }
+
+ client[kPendingIdx] = client[kRunningIdx]
+
+ assert(client[kRunning] === 0)
+
+ client.emit('disconnect',
+ client[kUrl],
+ [client],
+ err
+ )
+
+ resume(client)
+}
+
+const constants = __nccwpck_require__(953)
+const createRedirectInterceptor = __nccwpck_require__(8861)
+const EMPTY_BUF = Buffer.alloc(0)
+
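+// Lazily compiles the bundled llhttp WebAssembly parser, preferring the SIMD
+// build and falling back to the plain build if compilation fails.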
+async function lazyllhttp () {
+ const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined
+
+ let mod
+ try {
+ mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64'))
+ } catch (e) {
+ /* istanbul ignore next */
+
+    // We could check if the error was caused by the simd option not
+    // being enabled, but the occurrence of this other error
+    // * https://github.com/emscripten-core/emscripten/issues/11495
+    // led to removing that check to avoid breaking Node 12.
+ mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64'))
+ }
+
+ return await WebAssembly.instantiate(mod, {
+ env: {
+ /* eslint-disable camelcase */
+
+ wasm_on_url: (p, at, len) => {
+ /* istanbul ignore next */
+ return 0
+ },
+ wasm_on_status: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_message_begin: (p) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onMessageBegin() || 0
+ },
+ wasm_on_header_field: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_header_value: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0
+ },
+ wasm_on_body: (p, at, len) => {
+ assert.strictEqual(currentParser.ptr, p)
+ const start = at - currentBufferPtr + currentBufferRef.byteOffset
+ return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+ },
+ wasm_on_message_complete: (p) => {
+ assert.strictEqual(currentParser.ptr, p)
+ return currentParser.onMessageComplete() || 0
+ }
+
+ /* eslint-enable camelcase */
+ }
+ })
+}
+
+let llhttpInstance = null
+let llhttpPromise = lazyllhttp()
+llhttpPromise.catch()
+
+let currentParser = null
+let currentBufferRef = null
+let currentBufferSize = 0
+let currentBufferPtr = null
+
+const TIMEOUT_HEADERS = 1
+const TIMEOUT_BODY = 2
+const TIMEOUT_IDLE = 3
+
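+// Wraps a single llhttp wasm parser instance, feeds it raw socket data and
+// translates the parser callbacks into request lifecycle events
+// (onHeaders, onData, onComplete, onUpgrade).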
+class Parser {
+ constructor (client, socket, { exports }) {
+ assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)
+
+ this.llhttp = exports
+ this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)
+ this.client = client
+ this.socket = socket
+ this.timeout = null
+ this.timeoutValue = null
+ this.timeoutType = null
+ this.statusCode = null
+ this.statusText = ''
+ this.upgrade = false
+ this.headers = []
+ this.headersSize = 0
+ this.headersMaxSize = client[kMaxHeadersSize]
+ this.shouldKeepAlive = false
+ this.paused = false
+ this.resume = this.resume.bind(this)
+
+ this.bytesRead = 0
+
+ this.keepAlive = ''
+ this.contentLength = ''
+ this.connection = ''
+ this.maxResponseSize = client[kMaxResponseSize]
+ }
+
+ setTimeout (value, type) {
+ this.timeoutType = type
+ if (value !== this.timeoutValue) {
+ timers.clearTimeout(this.timeout)
+ if (value) {
+ this.timeout = timers.setTimeout(onParserTimeout, value, this)
+ // istanbul ignore else: only for jest
+ if (this.timeout.unref) {
+ this.timeout.unref()
+ }
+ } else {
+ this.timeout = null
+ }
+ this.timeoutValue = value
+ } else if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+ }
+
+ resume () {
+ if (this.socket.destroyed || !this.paused) {
+ return
+ }
+
+ assert(this.ptr != null)
+ assert(currentParser == null)
+
+ this.llhttp.llhttp_resume(this.ptr)
+
+ assert(this.timeoutType === TIMEOUT_BODY)
+ if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ this.paused = false
+ this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.
+ this.readMore()
+ }
+
+ readMore () {
+ while (!this.paused && this.ptr) {
+ const chunk = this.socket.read()
+ if (chunk === null) {
+ break
+ }
+ this.execute(chunk)
+ }
+ }
+
+ execute (data) {
+ assert(this.ptr != null)
+ assert(currentParser == null)
+ assert(!this.paused)
+
+ const { socket, llhttp } = this
+
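+    // Grow the shared scratch buffer in 4 KiB steps so the whole chunk fits in
+    // wasm memory before it is handed to llhttp.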
+ if (data.length > currentBufferSize) {
+ if (currentBufferPtr) {
+ llhttp.free(currentBufferPtr)
+ }
+ currentBufferSize = Math.ceil(data.length / 4096) * 4096
+ currentBufferPtr = llhttp.malloc(currentBufferSize)
+ }
+
+ new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)
+
+ // Call `execute` on the wasm parser.
+ // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,
+ // and finally the length of bytes to parse.
+ // The return value is an error code or `constants.ERROR.OK`.
+ try {
+ let ret
+
+ try {
+ currentBufferRef = data
+ currentParser = this
+ ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)
+ /* eslint-disable-next-line no-useless-catch */
+ } catch (err) {
+ /* istanbul ignore next: difficult to make a test case for */
+ throw err
+ } finally {
+ currentParser = null
+ currentBufferRef = null
+ }
+
+ const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr
+
+ if (ret === constants.ERROR.PAUSED_UPGRADE) {
+ this.onUpgrade(data.slice(offset))
+ } else if (ret === constants.ERROR.PAUSED) {
+ this.paused = true
+ socket.unshift(data.slice(offset))
+ } else if (ret !== constants.ERROR.OK) {
+ const ptr = llhttp.llhttp_get_error_reason(this.ptr)
+ let message = ''
+ /* istanbul ignore else: difficult to make a test case for */
+ if (ptr) {
+ const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
+ message =
+ 'Response does not match the HTTP/1.1 protocol (' +
+ Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
+ ')'
+ }
+ throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
+ }
+ } catch (err) {
+ util.destroy(socket, err)
+ }
+ }
+
+ destroy () {
+ assert(this.ptr != null)
+ assert(currentParser == null)
+
+ this.llhttp.llhttp_free(this.ptr)
+ this.ptr = null
+
+ timers.clearTimeout(this.timeout)
+ this.timeout = null
+ this.timeoutValue = null
+ this.timeoutType = null
+
+ this.paused = false
+ }
+
+ onStatus (buf) {
+ this.statusText = buf.toString()
+ }
+
+ onMessageBegin () {
+ const { socket, client } = this
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ if (!request) {
+ return -1
+ }
+ }
+
+ onHeaderField (buf) {
+ const len = this.headers.length
+
+ if ((len & 1) === 0) {
+ this.headers.push(buf)
+ } else {
+ this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+ }
+
+ this.trackHeader(buf.length)
+ }
+
+ onHeaderValue (buf) {
+ let len = this.headers.length
+
+ if ((len & 1) === 1) {
+ this.headers.push(buf)
+ len += 1
+ } else {
+ this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+ }
+
+ const key = this.headers[len - 2]
+ if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {
+ this.keepAlive += buf.toString()
+ } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {
+ this.connection += buf.toString()
+ } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {
+ this.contentLength += buf.toString()
+ }
+
+ this.trackHeader(buf.length)
+ }
+
+ trackHeader (len) {
+ this.headersSize += len
+ if (this.headersSize >= this.headersMaxSize) {
+ util.destroy(this.socket, new HeadersOverflowError())
+ }
+ }
+
+ onUpgrade (head) {
+ const { upgrade, client, socket, headers, statusCode } = this
+
+ assert(upgrade)
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert(!socket.destroyed)
+ assert(socket === client[kSocket])
+ assert(!this.paused)
+ assert(request.upgrade || request.method === 'CONNECT')
+
+ this.statusCode = null
+ this.statusText = ''
+ this.shouldKeepAlive = null
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
+ socket.unshift(head)
+
+ socket[kParser].destroy()
+ socket[kParser] = null
+
+ socket[kClient] = null
+ socket[kError] = null
+ socket
+ .removeListener('error', onSocketError)
+ .removeListener('readable', onSocketReadable)
+ .removeListener('end', onSocketEnd)
+ .removeListener('close', onSocketClose)
+
+ client[kSocket] = null
+ client[kQueue][client[kRunningIdx]++] = null
+ client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))
+
+ try {
+ request.onUpgrade(statusCode, headers, socket)
+ } catch (err) {
+ util.destroy(socket, err)
+ }
+
+ resume(client)
+ }
+
+ onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {
+ const { client, socket, headers, statusText } = this
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+
+ /* istanbul ignore next: difficult to make a test case for */
+ if (!request) {
+ return -1
+ }
+
+ assert(!this.upgrade)
+ assert(this.statusCode < 200)
+
+ if (statusCode === 100) {
+ util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))
+ return -1
+ }
+
+ /* this can only happen if server is misbehaving */
+ if (upgrade && !request.upgrade) {
+ util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))
+ return -1
+ }
+
+ assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)
+
+ this.statusCode = statusCode
+ this.shouldKeepAlive = (
+ shouldKeepAlive ||
+ // Override llhttp value which does not allow keepAlive for HEAD.
+ (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')
+ )
+
+ if (this.statusCode >= 200) {
+ const bodyTimeout = request.bodyTimeout != null
+ ? request.bodyTimeout
+ : client[kBodyTimeout]
+ this.setTimeout(bodyTimeout, TIMEOUT_BODY)
+ } else if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ if (request.method === 'CONNECT') {
+ assert(client[kRunning] === 1)
+ this.upgrade = true
+ return 2
+ }
+
+ if (upgrade) {
+ assert(client[kRunning] === 1)
+ this.upgrade = true
+ return 2
+ }
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
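+    // Derive the socket reuse timeout from the server's keep-alive hint, reduced
+    // by the configured threshold and capped at keepAliveMaxTimeout; without a
+    // hint, fall back to the default keep-alive timeout.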
+ if (this.shouldKeepAlive && client[kPipelining]) {
+ const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null
+
+ if (keepAliveTimeout != null) {
+ const timeout = Math.min(
+ keepAliveTimeout - client[kKeepAliveTimeoutThreshold],
+ client[kKeepAliveMaxTimeout]
+ )
+ if (timeout <= 0) {
+ socket[kReset] = true
+ } else {
+ client[kKeepAliveTimeoutValue] = timeout
+ }
+ } else {
+ client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]
+ }
+ } else {
+ // Stop more requests from being dispatched.
+ socket[kReset] = true
+ }
+
+ const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
+
+ if (request.aborted) {
+ return -1
+ }
+
+ if (request.method === 'HEAD') {
+ return 1
+ }
+
+ if (statusCode < 200) {
+ return 1
+ }
+
+ if (socket[kBlocking]) {
+ socket[kBlocking] = false
+ resume(client)
+ }
+
+ return pause ? constants.ERROR.PAUSED : 0
+ }
+
+ onBody (buf) {
+ const { client, socket, statusCode, maxResponseSize } = this
+
+ if (socket.destroyed) {
+ return -1
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert.strictEqual(this.timeoutType, TIMEOUT_BODY)
+ if (this.timeout) {
+ // istanbul ignore else: only for jest
+ if (this.timeout.refresh) {
+ this.timeout.refresh()
+ }
+ }
+
+ assert(statusCode >= 200)
+
+ if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {
+ util.destroy(socket, new ResponseExceededMaxSizeError())
+ return -1
+ }
+
+ this.bytesRead += buf.length
+
+ if (request.onData(buf) === false) {
+ return constants.ERROR.PAUSED
+ }
+ }
+
+ onMessageComplete () {
+ const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this
+
+ if (socket.destroyed && (!statusCode || shouldKeepAlive)) {
+ return -1
+ }
+
+ if (upgrade) {
+ return
+ }
+
+ const request = client[kQueue][client[kRunningIdx]]
+ assert(request)
+
+ assert(statusCode >= 100)
+
+ this.statusCode = null
+ this.statusText = ''
+ this.bytesRead = 0
+ this.contentLength = ''
+ this.keepAlive = ''
+ this.connection = ''
+
+ assert(this.headers.length % 2 === 0)
+ this.headers = []
+ this.headersSize = 0
+
+ if (statusCode < 200) {
+ return
+ }
+
+ /* istanbul ignore next: should be handled by llhttp? */
+ if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {
+ util.destroy(socket, new ResponseContentLengthMismatchError())
+ return -1
+ }
+
+ request.onComplete(headers)
+
+ client[kQueue][client[kRunningIdx]++] = null
+
+ if (socket[kWriting]) {
+ assert.strictEqual(client[kRunning], 0)
+ // Response completed before request.
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (!shouldKeepAlive) {
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (socket[kReset] && client[kRunning] === 0) {
+ // Destroy socket once all requests have completed.
+ // The request at the tail of the pipeline is the one
+ // that requested reset and no further requests should
+ // have been queued since then.
+ util.destroy(socket, new InformationalError('reset'))
+ return constants.ERROR.PAUSED
+ } else if (client[kPipelining] === 1) {
+ // We must wait a full event loop cycle to reuse this socket to make sure
+ // that non-spec compliant servers are not closing the connection even if they
+ // said they won't.
+ setImmediate(resume, client)
+ } else {
+ resume(client)
+ }
+ }
+}
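+
+// Illustrative sketch, not part of undici: how the keep-alive handling in the
+// headers handler above turns a server hint such as `keep-alive: timeout=5`
+// into the socket reuse window. The parameter defaults below are hypothetical
+// stand-ins for client[kKeepAliveTimeoutThreshold], client[kKeepAliveDefaultTimeout]
+// and client[kKeepAliveMaxTimeout]; this helper is never called.
+function exampleKeepAliveWindow (keepAliveHintMs, thresholdMs = 1e3, defaultMs = 4e3, maxMs = 600e3) {
+  if (keepAliveHintMs == null) {
+    // No usable hint from the server: fall back to the client default.
+    return defaultMs
+  }
+  // Subtract a safety threshold and clamp to the configured maximum,
+  // mirroring the Math.min() computation above.
+  const timeout = Math.min(keepAliveHintMs - thresholdMs, maxMs)
+  // A non-positive window means the socket is not worth reusing (kReset is set instead).
+  return timeout <= 0 ? 0 : timeout
+}
+// e.g. exampleKeepAliveWindow(5000) === 4000 with the defaults above.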
+
+function onParserTimeout (parser) {
+ const { socket, timeoutType, client } = parser
+
+ /* istanbul ignore else */
+ if (timeoutType === TIMEOUT_HEADERS) {
+ if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
+ assert(!parser.paused, 'cannot be paused while waiting for headers')
+ util.destroy(socket, new HeadersTimeoutError())
+ }
+ } else if (timeoutType === TIMEOUT_BODY) {
+ if (!parser.paused) {
+ util.destroy(socket, new BodyTimeoutError())
+ }
+ } else if (timeoutType === TIMEOUT_IDLE) {
+ assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])
+ util.destroy(socket, new InformationalError('socket idle timeout'))
+ }
+}
+
+function onSocketReadable () {
+ const { [kParser]: parser } = this
+ if (parser) {
+ parser.readMore()
+ }
+}
+
+function onSocketError (err) {
+ const { [kClient]: client, [kParser]: parser } = this
+
+ assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+ if (client[kHTTPConnVersion] !== 'h2') {
+ // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
+ // to the user.
+ if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
+      // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
+ }
+
+ this[kError] = err
+
+ onError(this[kClient], err)
+}
+
+function onError (client, err) {
+ if (
+ client[kRunning] === 0 &&
+ err.code !== 'UND_ERR_INFO' &&
+ err.code !== 'UND_ERR_SOCKET'
+ ) {
+ // Error is not caused by running request and not a recoverable
+ // socket error.
+
+ assert(client[kPendingIdx] === client[kRunningIdx])
+
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(client, request, err)
+ }
+ assert(client[kSize] === 0)
+ }
+}
+
+function onSocketEnd () {
+ const { [kParser]: parser, [kClient]: client } = this
+
+ if (client[kHTTPConnVersion] !== 'h2') {
+ if (parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
+ }
+
+ util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
+}
+
+function onSocketClose () {
+ const { [kClient]: client, [kParser]: parser } = this
+
+ if (client[kHTTPConnVersion] === 'h1' && parser) {
+ if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ }
+
+ this[kParser].destroy()
+ this[kParser] = null
+ }
+
+ const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
+
+ client[kSocket] = null
+
+ if (client.destroyed) {
+ assert(client[kPending] === 0)
+
+ // Fail entire queue.
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(client, request, err)
+ }
+ } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {
+ // Fail head of pipeline.
+ const request = client[kQueue][client[kRunningIdx]]
+ client[kQueue][client[kRunningIdx]++] = null
+
+ errorRequest(client, request, err)
+ }
+
+ client[kPendingIdx] = client[kRunningIdx]
+
+ assert(client[kRunning] === 0)
+
+ client.emit('disconnect', client[kUrl], [client], err)
+
+ resume(client)
+}
+
+async function connect (client) {
+ assert(!client[kConnecting])
+ assert(!client[kSocket])
+
+ let { host, hostname, protocol, port } = client[kUrl]
+
+ // Resolve ipv6
+ if (hostname[0] === '[') {
+ const idx = hostname.indexOf(']')
+
+ assert(idx !== -1)
+ const ip = hostname.substring(1, idx)
+
+ assert(net.isIP(ip))
+ hostname = ip
+ }
+
+ client[kConnecting] = true
+
+ if (channels.beforeConnect.hasSubscribers) {
+ channels.beforeConnect.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector]
+ })
+ }
+
+ try {
+ const socket = await new Promise((resolve, reject) => {
+ client[kConnector]({
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ }, (err, socket) => {
+ if (err) {
+ reject(err)
+ } else {
+ resolve(socket)
+ }
+ })
+ })
+
+ if (client.destroyed) {
+ util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
+ return
+ }
+
+ client[kConnecting] = false
+
+ assert(socket)
+
+ const isH2 = socket.alpnProtocol === 'h2'
+ if (isH2) {
+ if (!h2ExperimentalWarned) {
+ h2ExperimentalWarned = true
+        process.emitWarning('H2 support is experimental, expect it to change at any time.', {
+ code: 'UNDICI-H2'
+ })
+ }
+
+ const session = http2.connect(client[kUrl], {
+ createConnection: () => socket,
+ peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams
+ })
+
+ client[kHTTPConnVersion] = 'h2'
+ session[kClient] = client
+ session[kSocket] = socket
+ session.on('error', onHttp2SessionError)
+ session.on('frameError', onHttp2FrameError)
+ session.on('end', onHttp2SessionEnd)
+ session.on('goaway', onHTTP2GoAway)
+ session.on('close', onSocketClose)
+ session.unref()
+
+ client[kHTTP2Session] = session
+ socket[kHTTP2Session] = session
+ } else {
+ if (!llhttpInstance) {
+ llhttpInstance = await llhttpPromise
+ llhttpPromise = null
+ }
+
+ socket[kNoRef] = false
+ socket[kWriting] = false
+ socket[kReset] = false
+ socket[kBlocking] = false
+ socket[kParser] = new Parser(client, socket, llhttpInstance)
+ }
+
+ socket[kCounter] = 0
+ socket[kMaxRequests] = client[kMaxRequests]
+ socket[kClient] = client
+ socket[kError] = null
+
+ socket
+ .on('error', onSocketError)
+ .on('readable', onSocketReadable)
+ .on('end', onSocketEnd)
+ .on('close', onSocketClose)
+
+ client[kSocket] = socket
+
+ if (channels.connected.hasSubscribers) {
+ channels.connected.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector],
+ socket
+ })
+ }
+ client.emit('connect', client[kUrl], [client])
+ } catch (err) {
+ if (client.destroyed) {
+ return
+ }
+
+ client[kConnecting] = false
+
+ if (channels.connectError.hasSubscribers) {
+ channels.connectError.publish({
+ connectParams: {
+ host,
+ hostname,
+ protocol,
+ port,
+ servername: client[kServerName],
+ localAddress: client[kLocalAddress]
+ },
+ connector: client[kConnector],
+ error: err
+ })
+ }
+
+ if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
+ assert(client[kRunning] === 0)
+ while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
+ const request = client[kQueue][client[kPendingIdx]++]
+ errorRequest(client, request, err)
+ }
+ } else {
+ onError(client, err)
+ }
+
+ client.emit('connectionError', client[kUrl], [client], err)
+ }
+
+ resume(client)
+}
+
+function emitDrain (client) {
+ client[kNeedDrain] = 0
+ client.emit('drain', client[kUrl], [client])
+}
+
+function resume (client, sync) {
+ if (client[kResuming] === 2) {
+ return
+ }
+
+ client[kResuming] = 2
+
+ _resume(client, sync)
+ client[kResuming] = 0
+
+ if (client[kRunningIdx] > 256) {
+ client[kQueue].splice(0, client[kRunningIdx])
+ client[kPendingIdx] -= client[kRunningIdx]
+ client[kRunningIdx] = 0
+ }
+}
+
+function _resume (client, sync) {
+ while (true) {
+ if (client.destroyed) {
+ assert(client[kPending] === 0)
+ return
+ }
+
+ if (client[kClosedResolve] && !client[kSize]) {
+ client[kClosedResolve]()
+ client[kClosedResolve] = null
+ return
+ }
+
+ const socket = client[kSocket]
+
+ if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {
+ if (client[kSize] === 0) {
+ if (!socket[kNoRef] && socket.unref) {
+ socket.unref()
+ socket[kNoRef] = true
+ }
+ } else if (socket[kNoRef] && socket.ref) {
+ socket.ref()
+ socket[kNoRef] = false
+ }
+
+ if (client[kSize] === 0) {
+ if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {
+ socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)
+ }
+ } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {
+ if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {
+ const request = client[kQueue][client[kRunningIdx]]
+ const headersTimeout = request.headersTimeout != null
+ ? request.headersTimeout
+ : client[kHeadersTimeout]
+ socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)
+ }
+ }
+ }
+
+ if (client[kBusy]) {
+ client[kNeedDrain] = 2
+ } else if (client[kNeedDrain] === 2) {
+ if (sync) {
+ client[kNeedDrain] = 1
+ process.nextTick(emitDrain, client)
+ } else {
+ emitDrain(client)
+ }
+ continue
+ }
+
+ if (client[kPending] === 0) {
+ return
+ }
+
+ if (client[kRunning] >= (client[kPipelining] || 1)) {
+ return
+ }
+
+ const request = client[kQueue][client[kPendingIdx]]
+
+ if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
+ if (client[kRunning] > 0) {
+ return
+ }
+
+ client[kServerName] = request.servername
+
+ if (socket && socket.servername !== request.servername) {
+ util.destroy(socket, new InformationalError('servername changed'))
+ return
+ }
+ }
+
+ if (client[kConnecting]) {
+ return
+ }
+
+ if (!socket && !client[kHTTP2Session]) {
+ connect(client)
+ return
+ }
+
+ if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {
+ return
+ }
+
+ if (client[kRunning] > 0 && !request.idempotent) {
+      // A non-idempotent request cannot be retried.
+      // Ensure that no other requests are inflight whose
+      // failure could force it to be retried.
+ return
+ }
+
+ if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {
+ // Don't dispatch an upgrade until all preceding requests have completed.
+      // A misbehaving server might upgrade the connection before all pipelined
+      // requests have completed.
+ return
+ }
+
+ if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&
+ (util.isStream(request.body) || util.isAsyncIterable(request.body))) {
+      // A request with a stream or iterator body can error while other requests
+      // are inflight and indirectly error those as well.
+      // Avoid this by waiting for the inflight requests
+      // to complete before dispatching.
+
+      // A request with a stream or iterator body also cannot be retried.
+      // Ensure that no other requests are inflight whose
+      // failure could force it to be retried.
+ return
+ }
+
+ if (!request.aborted && write(client, request)) {
+ client[kPendingIdx]++
+ } else {
+ client[kQueue].splice(client[kPendingIdx], 1)
+ }
+ }
+}
+
+// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
+function shouldSendContentLength (method) {
+ return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
+}
+
+function write (client, request) {
+ if (client[kHTTPConnVersion] === 'h2') {
+ writeH2(client, client[kHTTP2Session], request)
+ return
+ }
+
+ const { body, method, path, host, upgrade, headers, blocking, reset } = request
+
+ // https://tools.ietf.org/html/rfc7231#section-4.3.1
+ // https://tools.ietf.org/html/rfc7231#section-4.3.2
+ // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+ // Sending a payload body on a request that does not
+ // expect it can cause undefined behavior on some
+ // servers and corrupt connection state. Do not
+ // re-use the connection for further requests.
+
+ const expectsPayload = (
+ method === 'PUT' ||
+ method === 'POST' ||
+ method === 'PATCH'
+ )
+
+ if (body && typeof body.read === 'function') {
+ // Try to read EOF in order to get length.
+ body.read(0)
+ }
+
+ const bodyLength = util.bodyLength(body)
+
+ let contentLength = bodyLength
+
+ if (contentLength === null) {
+ contentLength = request.contentLength
+ }
+
+ if (contentLength === 0 && !expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD NOT send a Content-Length header field when
+ // the request message does not contain a payload body and the method
+ // semantics do not anticipate such a body.
+
+ contentLength = null
+ }
+
+ // https://github.com/nodejs/undici/issues/2046
+ // A user agent may send a Content-Length header with 0 value, this should be allowed.
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError())
+ return false
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ const socket = client[kSocket]
+
+ try {
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return
+ }
+
+ errorRequest(client, request, err || new RequestAbortedError())
+
+ util.destroy(socket, new InformationalError('aborted'))
+ })
+ } catch (err) {
+ errorRequest(client, request, err)
+ }
+
+ if (request.aborted) {
+ return false
+ }
+
+ if (method === 'HEAD') {
+ // https://github.com/mcollina/undici/issues/258
+ // Close after a HEAD request to interop with misbehaving servers
+ // that may send a body in the response.
+
+ socket[kReset] = true
+ }
+
+ if (upgrade || method === 'CONNECT') {
+ // On CONNECT or upgrade, block pipeline from dispatching further
+ // requests on this connection.
+
+ socket[kReset] = true
+ }
+
+ if (reset != null) {
+ socket[kReset] = reset
+ }
+
+ if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) {
+ socket[kReset] = true
+ }
+
+ if (blocking) {
+ socket[kBlocking] = true
+ }
+
+ let header = `${method} ${path} HTTP/1.1\r\n`
+
+ if (typeof host === 'string') {
+ header += `host: ${host}\r\n`
+ } else {
+ header += client[kHostHeader]
+ }
+
+ if (upgrade) {
+ header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n`
+ } else if (client[kPipelining] && !socket[kReset]) {
+ header += 'connection: keep-alive\r\n'
+ } else {
+ header += 'connection: close\r\n'
+ }
+
+ if (headers) {
+ header += headers
+ }
+
+ if (channels.sendHeaders.hasSubscribers) {
+ channels.sendHeaders.publish({ request, headers: header, socket })
+ }
+
+ /* istanbul ignore else: assertion */
+ if (!body || bodyLength === 0) {
+ if (contentLength === 0) {
+ socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+ } else {
+ assert(contentLength === null, 'no body must not have content length')
+ socket.write(`${header}\r\n`, 'latin1')
+ }
+ request.onRequestSent()
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, 'buffer body must have content length')
+
+ socket.cork()
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ socket.write(body)
+ socket.uncork()
+ request.onBodySent(body)
+ request.onRequestSent()
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === 'function') {
+ writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload })
+ } else {
+ writeBlob({ body, client, request, socket, contentLength, header, expectsPayload })
+ }
+ } else if (util.isStream(body)) {
+ writeStream({ body, client, request, socket, contentLength, header, expectsPayload })
+ } else if (util.isIterable(body)) {
+ writeIterable({ body, client, request, socket, contentLength, header, expectsPayload })
+ } else {
+ assert(false)
+ }
+
+ return true
+}
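+
+// Illustrative sketch, not part of undici: the request head that write() above
+// serializes for a simple idempotent request without a body. Header lines and
+// the terminating blank line use CRLF, and connection reuse is signalled via
+// the `connection` header exactly as in the branches above. The values are
+// made-up sample data and this helper is never called.
+function exampleRequestHead () {
+  const method = 'GET'
+  const path = '/index.html'
+  const host = 'example.com'
+
+  let head = `${method} ${path} HTTP/1.1\r\n`
+  head += `host: ${host}\r\n`
+  // Pipelining enabled and the socket was not flagged for reset.
+  head += 'connection: keep-alive\r\n'
+  // No body and no payload-expecting method, so no content-length is emitted.
+  head += '\r\n'
+  return head
+}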
+
+function writeH2 (client, session, request) {
+ const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
+
+ let headers
+ if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())
+ else headers = reqHeaders
+
+ if (upgrade) {
+ errorRequest(client, request, new Error('Upgrade not supported for H2'))
+ return false
+ }
+
+ try {
+ // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return
+ }
+
+ errorRequest(client, request, err || new RequestAbortedError())
+ })
+ } catch (err) {
+ errorRequest(client, request, err)
+ }
+
+ if (request.aborted) {
+ return false
+ }
+
+ /** @type {import('node:http2').ClientHttp2Stream} */
+ let stream
+ const h2State = client[kHTTP2SessionState]
+
+ headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]
+ headers[HTTP2_HEADER_METHOD] = method
+
+ if (method === 'CONNECT') {
+ session.ref()
+    // We are already connected and streams are pending: the first request
+    // creates a new stream, so we issue a request to create the stream and
+    // wait until the 'ready' event is triggered.
+    // endStream is disabled so that the user can still write to the stream.
+ stream = session.request(headers, { endStream: false, signal })
+
+ if (stream.id && !stream.pending) {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ } else {
+ stream.once('ready', () => {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ })
+ }
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) session.unref()
+ })
+
+ return true
+ }
+
+ // https://tools.ietf.org/html/rfc7540#section-8.3
+  // :path and :scheme headers must be omitted when sending CONNECT
+
+ headers[HTTP2_HEADER_PATH] = path
+ headers[HTTP2_HEADER_SCHEME] = 'https'
+
+ // https://tools.ietf.org/html/rfc7231#section-4.3.1
+ // https://tools.ietf.org/html/rfc7231#section-4.3.2
+ // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+ // Sending a payload body on a request that does not
+ // expect it can cause undefined behavior on some
+ // servers and corrupt connection state. Do not
+ // re-use the connection for further requests.
+
+ const expectsPayload = (
+ method === 'PUT' ||
+ method === 'POST' ||
+ method === 'PATCH'
+ )
+
+ if (body && typeof body.read === 'function') {
+ // Try to read EOF in order to get length.
+ body.read(0)
+ }
+
+ let contentLength = util.bodyLength(body)
+
+ if (contentLength == null) {
+ contentLength = request.contentLength
+ }
+
+ if (contentLength === 0 || !expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD NOT send a Content-Length header field when
+ // the request message does not contain a payload body and the method
+ // semantics do not anticipate such a body.
+
+ contentLength = null
+ }
+
+ // https://github.com/nodejs/undici/issues/2046
+ // A user agent may send a Content-Length header with 0 value, this should be allowed.
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError())
+ return false
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ if (contentLength != null) {
+ assert(body, 'no body must not have content length')
+ headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
+ }
+
+ session.ref()
+
+ const shouldEndStream = method === 'GET' || method === 'HEAD'
+ if (expectContinue) {
+ headers[HTTP2_HEADER_EXPECT] = '100-continue'
+ stream = session.request(headers, { endStream: shouldEndStream, signal })
+
+ stream.once('continue', writeBodyH2)
+ } else {
+ stream = session.request(headers, {
+ endStream: shouldEndStream,
+ signal
+ })
+ writeBodyH2()
+ }
+
+  // Increment the open-streams counter as we have just opened a new stream
+ ++h2State.openStreams
+
+ stream.once('response', headers => {
+ const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
+
+ if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
+ stream.pause()
+ }
+ })
+
+ stream.once('end', () => {
+ request.onComplete([])
+ })
+
+ stream.on('data', (chunk) => {
+ if (request.onData(chunk) === false) {
+ stream.pause()
+ }
+ })
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) {
+ session.unref()
+ }
+ })
+
+ stream.once('error', function (err) {
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ stream.once('frameError', (type, code) => {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+ errorRequest(client, request, err)
+
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ // stream.on('aborted', () => {
+ // // TODO(HTTP/2): Support aborted
+ // })
+
+ // stream.on('timeout', () => {
+ // // TODO(HTTP/2): Support timeout
+ // })
+
+ // stream.on('push', headers => {
+  // // TODO(HTTP/2): Support push
+ // })
+
+ // stream.on('trailers', headers => {
+ // // TODO(HTTP/2): Support trailers
+ // })
+
+ return true
+
+ function writeBodyH2 () {
+ /* istanbul ignore else: assertion */
+ if (!body) {
+ request.onRequestSent()
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, 'buffer body must have content length')
+ stream.cork()
+ stream.write(body)
+ stream.uncork()
+ stream.end()
+ request.onBodySent(body)
+ request.onRequestSent()
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === 'function') {
+ writeIterable({
+ client,
+ request,
+ contentLength,
+ h2stream: stream,
+ expectsPayload,
+ body: body.stream(),
+ socket: client[kSocket],
+ header: ''
+ })
+ } else {
+ writeBlob({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ h2stream: stream,
+ header: '',
+ socket: client[kSocket]
+ })
+ }
+ } else if (util.isStream(body)) {
+ writeStream({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ''
+ })
+ } else if (util.isIterable(body)) {
+ writeIterable({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ header: '',
+ h2stream: stream,
+ socket: client[kSocket]
+ })
+ } else {
+ assert(false)
+ }
+ }
+}
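+
+// Illustrative sketch, not part of undici: the header object that writeH2 above
+// passes to session.request() for a request with a known body length. The
+// pseudo-header names are written out literally here instead of the
+// HTTP2_HEADER_* constants; the values are made-up sample data and this helper
+// is never called.
+function exampleH2RequestHeaders () {
+  return {
+    ':authority': 'example.com', // host, HTTP2_HEADER_AUTHORITY
+    ':method': 'POST', // HTTP2_HEADER_METHOD
+    ':path': '/upload', // HTTP2_HEADER_PATH (omitted for CONNECT)
+    ':scheme': 'https', // HTTP2_HEADER_SCHEME (omitted for CONNECT)
+    'content-length': '42' // only set when a body length is known
+  }
+}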
+
+function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
+
+ if (client[kHTTPConnVersion] === 'h2') {
+    // For HTTP/2, it is enough to pipe the stream
+ const pipe = pipeline(
+ body,
+ h2stream,
+ (err) => {
+ if (err) {
+ util.destroy(body, err)
+ util.destroy(h2stream, err)
+ } else {
+ request.onRequestSent()
+ }
+ }
+ )
+
+ pipe.on('data', onPipeData)
+ pipe.once('end', () => {
+ pipe.removeListener('data', onPipeData)
+ util.destroy(pipe)
+ })
+
+ function onPipeData (chunk) {
+ request.onBodySent(chunk)
+ }
+
+ return
+ }
+
+ let finished = false
+
+ const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+
+ const onData = function (chunk) {
+ if (finished) {
+ return
+ }
+
+ try {
+ if (!writer.write(chunk) && this.pause) {
+ this.pause()
+ }
+ } catch (err) {
+ util.destroy(this, err)
+ }
+ }
+ const onDrain = function () {
+ if (finished) {
+ return
+ }
+
+ if (body.resume) {
+ body.resume()
+ }
+ }
+ const onAbort = function () {
+ if (finished) {
+ return
+ }
+ const err = new RequestAbortedError()
+ queueMicrotask(() => onFinished(err))
+ }
+ const onFinished = function (err) {
+ if (finished) {
+ return
+ }
+
+ finished = true
+
+ assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1))
+
+ socket
+ .off('drain', onDrain)
+ .off('error', onFinished)
+
+ body
+ .removeListener('data', onData)
+ .removeListener('end', onFinished)
+ .removeListener('error', onFinished)
+ .removeListener('close', onAbort)
+
+ if (!err) {
+ try {
+ writer.end()
+ } catch (er) {
+ err = er
+ }
+ }
+
+ writer.destroy(err)
+
+ if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) {
+ util.destroy(body, err)
+ } else {
+ util.destroy(body)
+ }
+ }
+
+ body
+ .on('data', onData)
+ .on('end', onFinished)
+ .on('error', onFinished)
+ .on('close', onAbort)
+
+ if (body.resume) {
+ body.resume()
+ }
+
+ socket
+ .on('drain', onDrain)
+ .on('error', onFinished)
+}
+
+async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength === body.size, 'blob body must have content length')
+
+ const isH2 = client[kHTTPConnVersion] === 'h2'
+ try {
+ if (contentLength != null && contentLength !== body.size) {
+ throw new RequestContentLengthMismatchError()
+ }
+
+ const buffer = Buffer.from(await body.arrayBuffer())
+
+ if (isH2) {
+ h2stream.cork()
+ h2stream.write(buffer)
+ h2stream.uncork()
+ } else {
+ socket.cork()
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ socket.write(buffer)
+ socket.uncork()
+ }
+
+ request.onBodySent(buffer)
+ request.onRequestSent()
+
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+
+ resume(client)
+ } catch (err) {
+ util.destroy(isH2 ? h2stream : socket, err)
+ }
+}
+
+async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+ assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
+
+ let callback = null
+ function onDrain () {
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb()
+ }
+ }
+
+ const waitForDrain = () => new Promise((resolve, reject) => {
+ assert(callback === null)
+
+ if (socket[kError]) {
+ reject(socket[kError])
+ } else {
+ callback = resolve
+ }
+ })
+
+ if (client[kHTTPConnVersion] === 'h2') {
+ h2stream
+ .on('close', onDrain)
+ .on('drain', onDrain)
+
+ try {
+ // It's up to the user to somehow abort the async iterable.
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ const res = h2stream.write(chunk)
+ request.onBodySent(chunk)
+ if (!res) {
+ await waitForDrain()
+ }
+ }
+ } catch (err) {
+ h2stream.destroy(err)
+ } finally {
+ request.onRequestSent()
+ h2stream.end()
+ h2stream
+ .off('close', onDrain)
+ .off('drain', onDrain)
+ }
+
+ return
+ }
+
+ socket
+ .on('close', onDrain)
+ .on('drain', onDrain)
+
+ const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+ try {
+ // It's up to the user to somehow abort the async iterable.
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (!writer.write(chunk)) {
+ await waitForDrain()
+ }
+ }
+
+ writer.end()
+ } catch (err) {
+ writer.destroy(err)
+ } finally {
+ socket
+ .off('close', onDrain)
+ .off('drain', onDrain)
+ }
+}
+
+class AsyncWriter {
+ constructor ({ socket, request, contentLength, client, expectsPayload, header }) {
+ this.socket = socket
+ this.request = request
+ this.contentLength = contentLength
+ this.client = client
+ this.bytesWritten = 0
+ this.expectsPayload = expectsPayload
+ this.header = header
+
+ socket[kWriting] = true
+ }
+
+ write (chunk) {
+ const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this
+
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (socket.destroyed) {
+ return false
+ }
+
+ const len = Buffer.byteLength(chunk)
+ if (!len) {
+ return true
+ }
+
+    // Guard against writing more bytes than the declared content-length.
+ if (contentLength !== null && bytesWritten + len > contentLength) {
+ if (client[kStrictContentLength]) {
+ throw new RequestContentLengthMismatchError()
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ socket.cork()
+
+ if (bytesWritten === 0) {
+ if (!expectsPayload) {
+ socket[kReset] = true
+ }
+
+ if (contentLength === null) {
+ socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
+ } else {
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ }
+ }
+
+ if (contentLength === null) {
+ socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
+ }
+
+ this.bytesWritten += len
+
+ const ret = socket.write(chunk)
+
+ socket.uncork()
+
+ request.onBodySent(chunk)
+
+ if (!ret) {
+ if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+ // istanbul ignore else: only for jest
+ if (socket[kParser].timeout.refresh) {
+ socket[kParser].timeout.refresh()
+ }
+ }
+ }
+
+ return ret
+ }
+
+ end () {
+ const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this
+ request.onRequestSent()
+
+ socket[kWriting] = false
+
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (socket.destroyed) {
+ return
+ }
+
+ if (bytesWritten === 0) {
+ if (expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD send a Content-Length in a request message when
+ // no Transfer-Encoding is sent and the request method defines a meaning
+ // for an enclosed payload body.
+
+ socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+ } else {
+ socket.write(`${header}\r\n`, 'latin1')
+ }
+ } else if (contentLength === null) {
+ socket.write('\r\n0\r\n\r\n', 'latin1')
+ }
+
+ if (contentLength !== null && bytesWritten !== contentLength) {
+ if (client[kStrictContentLength]) {
+ throw new RequestContentLengthMismatchError()
+ } else {
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+ }
+
+ if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+ // istanbul ignore else: only for jest
+ if (socket[kParser].timeout.refresh) {
+ socket[kParser].timeout.refresh()
+ }
+ }
+
+ resume(client)
+ }
+
+ destroy (err) {
+ const { socket, client } = this
+
+ socket[kWriting] = false
+
+ if (err) {
+ assert(client[kRunning] <= 1, 'pipeline should only contain this request')
+ util.destroy(socket, err)
+ }
+ }
+}
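+
+// Illustrative sketch, not part of undici: the chunked transfer-encoding framing
+// AsyncWriter produces when no content-length is known. Each chunk is preceded
+// by its byte length in hex plus CRLF, and the body ends with a zero-length
+// chunk, matching the `\r\n${len.toString(16)}\r\n` and '\r\n0\r\n\r\n' writes
+// above. This helper is never called.
+function exampleChunkedFraming (chunks) {
+  let wire = 'transfer-encoding: chunked\r\n'
+  for (const chunk of chunks) {
+    const len = Buffer.byteLength(chunk)
+    wire += `\r\n${len.toString(16)}\r\n${chunk}`
+  }
+  // Terminating zero-length chunk.
+  wire += '\r\n0\r\n\r\n'
+  return wire
+}
+// e.g. exampleChunkedFraming(['hello', ' world']) =>
+// 'transfer-encoding: chunked\r\n\r\n5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n'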
+
+function errorRequest (client, request, err) {
+ try {
+ request.onError(err)
+ assert(request.aborted)
+ } catch (err) {
+ client.emit('error', err)
+ }
+}
+
+module.exports = Client
+
+
+/***/ }),
+
+/***/ 6436:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+/* istanbul ignore file: only for Node 12 */
+
+const { kConnected, kSize } = __nccwpck_require__(2785)
+
+class CompatWeakRef {
+ constructor (value) {
+ this.value = value
+ }
+
+ deref () {
+ return this.value[kConnected] === 0 && this.value[kSize] === 0
+ ? undefined
+ : this.value
+ }
+}
+
+class CompatFinalizer {
+ constructor (finalizer) {
+ this.finalizer = finalizer
+ }
+
+ register (dispatcher, key) {
+ if (dispatcher.on) {
+ dispatcher.on('disconnect', () => {
+ if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {
+ this.finalizer(key)
+ }
+ })
+ }
+ }
+}
+
+module.exports = function () {
+ // FIXME: remove workaround when the Node bug is fixed
+ // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+ if (process.env.NODE_V8_COVERAGE) {
+ return {
+ WeakRef: CompatWeakRef,
+ FinalizationRegistry: CompatFinalizer
+ }
+ }
+ return {
+ WeakRef: global.WeakRef || CompatWeakRef,
+ FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
+ }
+}
+
+
+/***/ }),
+
+/***/ 663:
+/***/ ((module) => {
+
+"use strict";
+
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
+const maxAttributeValueSize = 1024
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
+const maxNameValuePairSize = 4096
+
+module.exports = {
+ maxAttributeValueSize,
+ maxNameValuePairSize
+}
+
+
+/***/ }),
+
+/***/ 1724:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { parseSetCookie } = __nccwpck_require__(4408)
+const { stringify, getHeadersList } = __nccwpck_require__(3121)
+const { webidl } = __nccwpck_require__(1744)
+const { Headers } = __nccwpck_require__(554)
+
+/**
+ * @typedef {Object} Cookie
+ * @property {string} name
+ * @property {string} value
+ * @property {Date|number|undefined} expires
+ * @property {number|undefined} maxAge
+ * @property {string|undefined} domain
+ * @property {string|undefined} path
+ * @property {boolean|undefined} secure
+ * @property {boolean|undefined} httpOnly
+ * @property {'Strict'|'Lax'|'None'} sameSite
+ * @property {string[]} unparsed
+ */
+
+/**
+ * @param {Headers} headers
+ * @returns {Record<string, string>}
+ */
+function getCookies (headers) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ const cookie = headers.get('cookie')
+ const out = {}
+
+ if (!cookie) {
+ return out
+ }
+
+ for (const piece of cookie.split(';')) {
+ const [name, ...value] = piece.split('=')
+
+ out[name.trim()] = value.join('=')
+ }
+
+ return out
+}
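+
+// Illustrative only, never called: getCookies() above splits the request
+// `cookie` header on ';' and '=' without any decoding. This standalone helper
+// repeats that split on a sample string to show the resulting shape.
+function exampleCookieHeaderShape () {
+  const out = {}
+  for (const piece of 'sid=abc123; theme=dark'.split(';')) {
+    const [name, ...value] = piece.split('=')
+    out[name.trim()] = value.join('=')
+  }
+  return out // => { sid: 'abc123', theme: 'dark' }
+}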
+
+/**
+ * @param {Headers} headers
+ * @param {string} name
+ * @param {{ path?: string, domain?: string }|undefined} attributes
+ * @returns {void}
+ */
+function deleteCookie (headers, name, attributes) {
+ webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ name = webidl.converters.DOMString(name)
+ attributes = webidl.converters.DeleteCookieAttributes(attributes)
+
+ // Matches behavior of
+ // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
+ setCookie(headers, {
+ name,
+ value: '',
+ expires: new Date(0),
+ ...attributes
+ })
+}
+
+/**
+ * @param {Headers} headers
+ * @returns {Cookie[]}
+ */
+function getSetCookies (headers) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ const cookies = getHeadersList(headers).cookies
+
+ if (!cookies) {
+ return []
+ }
+
+ // In older versions of undici, cookies is a list of name:value.
+ return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
+}
+
+/**
+ * @param {Headers} headers
+ * @param {Cookie} cookie
+ * @returns {void}
+ */
+function setCookie (headers, cookie) {
+ webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
+
+ webidl.brandCheck(headers, Headers, { strict: false })
+
+ cookie = webidl.converters.Cookie(cookie)
+
+ const str = stringify(cookie)
+
+ if (str) {
+ headers.append('Set-Cookie', stringify(cookie))
+ }
+}
+
+webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'path',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'domain',
+ defaultValue: null
+ }
+])
+
+webidl.converters.Cookie = webidl.dictionaryConverter([
+ {
+ converter: webidl.converters.DOMString,
+ key: 'name'
+ },
+ {
+ converter: webidl.converters.DOMString,
+ key: 'value'
+ },
+ {
+ converter: webidl.nullableConverter((value) => {
+ if (typeof value === 'number') {
+ return webidl.converters['unsigned long long'](value)
+ }
+
+ return new Date(value)
+ }),
+ key: 'expires',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters['long long']),
+ key: 'maxAge',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'domain',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.DOMString),
+ key: 'path',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.boolean),
+ key: 'secure',
+ defaultValue: null
+ },
+ {
+ converter: webidl.nullableConverter(webidl.converters.boolean),
+ key: 'httpOnly',
+ defaultValue: null
+ },
+ {
+ converter: webidl.converters.USVString,
+ key: 'sameSite',
+ allowedValues: ['Strict', 'Lax', 'None']
+ },
+ {
+ converter: webidl.sequenceConverter(webidl.converters.DOMString),
+ key: 'unparsed',
+ defaultValue: []
+ }
+])
+
+module.exports = {
+ getCookies,
+ deleteCookie,
+ getSetCookies,
+ setCookie
+}
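+
+// Illustrative usage sketch, never called: how the helpers exported above fit
+// together. `Headers` is the undici Headers class required at the top of this
+// module; the cookie values are made-up sample data.
+function exampleSetCookieRoundTrip () {
+  const headers = new Headers()
+
+  // Serialize a cookie object onto a Set-Cookie header.
+  setCookie(headers, {
+    name: 'sid',
+    value: 'abc123',
+    path: '/',
+    secure: true,
+    sameSite: 'Lax'
+  })
+
+  // Parse it back into cookie objects via the headers list.
+  return getSetCookies(headers)
+}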
+
+
+/***/ }),
+
+/***/ 4408:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(663)
+const { isCTLExcludingHtab } = __nccwpck_require__(3121)
+const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685)
+const assert = __nccwpck_require__(9491)
+
+/**
+ * @description Parses the field-value attributes of a set-cookie header string.
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} header
+ * @returns if the header is invalid, null will be returned
+ */
+function parseSetCookie (header) {
+ // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
+ // character (CTL characters excluding HTAB): Abort these steps and
+ // ignore the set-cookie-string entirely.
+ if (isCTLExcludingHtab(header)) {
+ return null
+ }
+
+ let nameValuePair = ''
+ let unparsedAttributes = ''
+ let name = ''
+ let value = ''
+
+ // 2. If the set-cookie-string contains a %x3B (";") character:
+ if (header.includes(';')) {
+ // 1. The name-value-pair string consists of the characters up to,
+ // but not including, the first %x3B (";"), and the unparsed-
+ // attributes consist of the remainder of the set-cookie-string
+ // (including the %x3B (";") in question).
+ const position = { position: 0 }
+
+ nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
+ unparsedAttributes = header.slice(position.position)
+ } else {
+ // Otherwise:
+
+ // 1. The name-value-pair string consists of all the characters
+ // contained in the set-cookie-string, and the unparsed-
+ // attributes is the empty string.
+ nameValuePair = header
+ }
+
+ // 3. If the name-value-pair string lacks a %x3D ("=") character, then
+ // the name string is empty, and the value string is the value of
+ // name-value-pair.
+ if (!nameValuePair.includes('=')) {
+ value = nameValuePair
+ } else {
+ // Otherwise, the name string consists of the characters up to, but
+ // not including, the first %x3D ("=") character, and the (possibly
+ // empty) value string consists of the characters after the first
+ // %x3D ("=") character.
+ const position = { position: 0 }
+ name = collectASequenceOfCodePointsFast(
+ '=',
+ nameValuePair,
+ position
+ )
+ value = nameValuePair.slice(position.position + 1)
+ }
+
+ // 4. Remove any leading or trailing WSP characters from the name
+ // string and the value string.
+ name = name.trim()
+ value = value.trim()
+
+ // 5. If the sum of the lengths of the name string and the value string
+ // is more than 4096 octets, abort these steps and ignore the set-
+ // cookie-string entirely.
+ if (name.length + value.length > maxNameValuePairSize) {
+ return null
+ }
+
+ // 6. The cookie-name is the name string, and the cookie-value is the
+ // value string.
+ return {
+ name, value, ...parseUnparsedAttributes(unparsedAttributes)
+ }
+}
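+
+// Illustrative only, never called: what the algorithm above produces for a
+// typical Set-Cookie value (attribute handling is done by
+// parseUnparsedAttributes below).
+function exampleParseSetCookie () {
+  return parseSetCookie('sid=abc123; Path=/; Secure; HttpOnly; SameSite=Lax')
+  // => { name: 'sid', value: 'abc123', path: '/', secure: true,
+  //      httpOnly: true, sameSite: 'Lax' }
+}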
+
+/**
+ * Parses the remaining attributes of a set-cookie header
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} unparsedAttributes
+ * @param {Object.<string, unknown>} [cookieAttributeList={}]
+ */
+function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
+ // 1. If the unparsed-attributes string is empty, skip the rest of
+ // these steps.
+ if (unparsedAttributes.length === 0) {
+ return cookieAttributeList
+ }
+
+ // 2. Discard the first character of the unparsed-attributes (which
+ // will be a %x3B (";") character).
+ assert(unparsedAttributes[0] === ';')
+ unparsedAttributes = unparsedAttributes.slice(1)
+
+ let cookieAv = ''
+
+ // 3. If the remaining unparsed-attributes contains a %x3B (";")
+ // character:
+ if (unparsedAttributes.includes(';')) {
+ // 1. Consume the characters of the unparsed-attributes up to, but
+ // not including, the first %x3B (";") character.
+ cookieAv = collectASequenceOfCodePointsFast(
+ ';',
+ unparsedAttributes,
+ { position: 0 }
+ )
+ unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
+ } else {
+ // Otherwise:
+
+ // 1. Consume the remainder of the unparsed-attributes.
+ cookieAv = unparsedAttributes
+ unparsedAttributes = ''
+ }
+
+ // Let the cookie-av string be the characters consumed in this step.
+
+ let attributeName = ''
+ let attributeValue = ''
+
+ // 4. If the cookie-av string contains a %x3D ("=") character:
+ if (cookieAv.includes('=')) {
+ // 1. The (possibly empty) attribute-name string consists of the
+ // characters up to, but not including, the first %x3D ("=")
+ // character, and the (possibly empty) attribute-value string
+ // consists of the characters after the first %x3D ("=")
+ // character.
+ const position = { position: 0 }
+
+ attributeName = collectASequenceOfCodePointsFast(
+ '=',
+ cookieAv,
+ position
+ )
+ attributeValue = cookieAv.slice(position.position + 1)
+ } else {
+ // Otherwise:
+
+ // 1. The attribute-name string consists of the entire cookie-av
+ // string, and the attribute-value string is empty.
+ attributeName = cookieAv
+ }
+
+ // 5. Remove any leading or trailing WSP characters from the attribute-
+ // name string and the attribute-value string.
+ attributeName = attributeName.trim()
+ attributeValue = attributeValue.trim()
+
+ // 6. If the attribute-value is longer than 1024 octets, ignore the
+ // cookie-av string and return to Step 1 of this algorithm.
+ if (attributeValue.length > maxAttributeValueSize) {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 7. Process the attribute-name and attribute-value according to the
+ // requirements in the following subsections. (Notice that
+ // attributes with unrecognized attribute-names are ignored.)
+ const attributeNameLowercase = attributeName.toLowerCase()
+
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
+ // If the attribute-name case-insensitively matches the string
+ // "Expires", the user agent MUST process the cookie-av as follows.
+ if (attributeNameLowercase === 'expires') {
+ // 1. Let the expiry-time be the result of parsing the attribute-value
+ // as cookie-date (see Section 5.1.1).
+ const expiryTime = new Date(attributeValue)
+
+ // 2. If the attribute-value failed to parse as a cookie date, ignore
+ // the cookie-av.
+
+ cookieAttributeList.expires = expiryTime
+ } else if (attributeNameLowercase === 'max-age') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
+ // If the attribute-name case-insensitively matches the string "Max-
+ // Age", the user agent MUST process the cookie-av as follows.
+
+ // 1. If the first character of the attribute-value is not a DIGIT or a
+ // "-" character, ignore the cookie-av.
+ const charCode = attributeValue.charCodeAt(0)
+
+ if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 2. If the remainder of attribute-value contains a non-DIGIT
+ // character, ignore the cookie-av.
+ if (!/^\d+$/.test(attributeValue)) {
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+ }
+
+ // 3. Let delta-seconds be the attribute-value converted to an integer.
+ const deltaSeconds = Number(attributeValue)
+
+ // 4. Let cookie-age-limit be the maximum age of the cookie (which
+ // SHOULD be 400 days or less, see Section 4.1.2.2).
+
+ // 5. Set delta-seconds to the smaller of its present value and cookie-
+ // age-limit.
+ // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
+
+ // 6. If delta-seconds is less than or equal to zero (0), let expiry-
+ // time be the earliest representable date and time. Otherwise, let
+ // the expiry-time be the current date and time plus delta-seconds
+ // seconds.
+ // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
+
+ // 7. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Max-Age and an attribute-value of expiry-time.
+ cookieAttributeList.maxAge = deltaSeconds
+ } else if (attributeNameLowercase === 'domain') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
+ // If the attribute-name case-insensitively matches the string "Domain",
+ // the user agent MUST process the cookie-av as follows.
+
+ // 1. Let cookie-domain be the attribute-value.
+ let cookieDomain = attributeValue
+
+ // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
+ // cookie-domain without its leading %x2E (".").
+ if (cookieDomain[0] === '.') {
+ cookieDomain = cookieDomain.slice(1)
+ }
+
+ // 3. Convert the cookie-domain to lower case.
+ cookieDomain = cookieDomain.toLowerCase()
+
+ // 4. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Domain and an attribute-value of cookie-domain.
+ cookieAttributeList.domain = cookieDomain
+ } else if (attributeNameLowercase === 'path') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
+ // If the attribute-name case-insensitively matches the string "Path",
+ // the user agent MUST process the cookie-av as follows.
+
+ // 1. If the attribute-value is empty or if the first character of the
+ // attribute-value is not %x2F ("/"):
+ let cookiePath = ''
+ if (attributeValue.length === 0 || attributeValue[0] !== '/') {
+ // 1. Let cookie-path be the default-path.
+ cookiePath = '/'
+ } else {
+ // Otherwise:
+
+ // 1. Let cookie-path be the attribute-value.
+ cookiePath = attributeValue
+ }
+
+ // 2. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of Path and an attribute-value of cookie-path.
+ cookieAttributeList.path = cookiePath
+ } else if (attributeNameLowercase === 'secure') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
+ // If the attribute-name case-insensitively matches the string "Secure",
+ // the user agent MUST append an attribute to the cookie-attribute-list
+ // with an attribute-name of Secure and an empty attribute-value.
+
+ cookieAttributeList.secure = true
+ } else if (attributeNameLowercase === 'httponly') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
+ // If the attribute-name case-insensitively matches the string
+ // "HttpOnly", the user agent MUST append an attribute to the cookie-
+ // attribute-list with an attribute-name of HttpOnly and an empty
+ // attribute-value.
+
+ cookieAttributeList.httpOnly = true
+ } else if (attributeNameLowercase === 'samesite') {
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
+ // If the attribute-name case-insensitively matches the string
+ // "SameSite", the user agent MUST process the cookie-av as follows:
+
+ // 1. Let enforcement be "Default".
+ let enforcement = 'Default'
+
+ const attributeValueLowercase = attributeValue.toLowerCase()
+ // 2. If cookie-av's attribute-value is a case-insensitive match for
+ // "None", set enforcement to "None".
+ if (attributeValueLowercase.includes('none')) {
+ enforcement = 'None'
+ }
+
+ // 3. If cookie-av's attribute-value is a case-insensitive match for
+ // "Strict", set enforcement to "Strict".
+ if (attributeValueLowercase.includes('strict')) {
+ enforcement = 'Strict'
+ }
+
+ // 4. If cookie-av's attribute-value is a case-insensitive match for
+ // "Lax", set enforcement to "Lax".
+ if (attributeValueLowercase.includes('lax')) {
+ enforcement = 'Lax'
+ }
+
+ // 5. Append an attribute to the cookie-attribute-list with an
+ // attribute-name of "SameSite" and an attribute-value of
+ // enforcement.
+ cookieAttributeList.sameSite = enforcement
+ } else {
+ cookieAttributeList.unparsed ??= []
+
+ cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
+ }
+
+ // 8. Return to Step 1 of this algorithm.
+ return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+}
+
+module.exports = {
+ parseSetCookie,
+ parseUnparsedAttributes
+}
+
+
+/***/ }),
+
+/***/ 3121:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const assert = __nccwpck_require__(9491)
+const { kHeadersList } = __nccwpck_require__(2785)
+
+function isCTLExcludingHtab (value) {
+  for (const char of value) {
+    const code = char.charCodeAt(0)
+
+    if (
+      (code >= 0x00 && code <= 0x08) ||
+      (code >= 0x0A && code <= 0x1F) ||
+      code === 0x7F
+    ) {
+      // The value contains a CTL character other than HTAB (0x09).
+      return true
+    }
+  }
+
+  return false
+}
+
+/**
+ CHAR = <any US-ASCII character (octets 0 - 127)>
+ token = 1*<any CHAR except CTLs or separators>
+ separators = "(" | ")" | "<" | ">" | "@"
+ | "," | ";" | ":" | "\" | <">
+ | "/" | "[" | "]" | "?" | "="
+ | "{" | "}" | SP | HT
+ * @param {string} name
+ */
+function validateCookieName (name) {
+ for (const char of name) {
+ const code = char.charCodeAt(0)
+
+ if (
+ (code <= 0x20 || code > 0x7F) ||
+ char === '(' ||
+ char === ')' ||
+ char === '>' ||
+ char === '<' ||
+ char === '@' ||
+ char === ',' ||
+ char === ';' ||
+ char === ':' ||
+ char === '\\' ||
+ char === '"' ||
+ char === '/' ||
+ char === '[' ||
+ char === ']' ||
+ char === '?' ||
+ char === '=' ||
+ char === '{' ||
+ char === '}'
+ ) {
+ throw new Error('Invalid cookie name')
+ }
+ }
+}
+
+/**
+ cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
+ cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
+ ; US-ASCII characters excluding CTLs,
+ ; whitespace DQUOTE, comma, semicolon,
+ ; and backslash
+ * @param {string} value
+ */
+function validateCookieValue (value) {
+ for (const char of value) {
+ const code = char.charCodeAt(0)
+
+ if (
+ code < 0x21 || // exclude CTLs (0-31)
+ code === 0x22 ||
+ code === 0x2C ||
+ code === 0x3B ||
+ code === 0x5C ||
+ code > 0x7E // non-ascii
+ ) {
+ throw new Error('Invalid header value')
+ }
+ }
+}
+
+/**
+ * path-value = <any CHAR except CTLs or ";">
+ * @param {string} path
+ */
+function validateCookiePath (path) {
+ for (const char of path) {
+ const code = char.charCodeAt(0)
+
+ if (code < 0x21 || char === ';') {
+ throw new Error('Invalid cookie path')
+ }
+ }
+}
+
+/**
+ * I have no idea why these values aren't allowed to be honest,
+ * but Deno tests these. - Khafra
+ * @param {string} domain
+ */
+function validateCookieDomain (domain) {
+ if (
+ domain.startsWith('-') ||
+ domain.endsWith('.') ||
+ domain.endsWith('-')
+ ) {
+ throw new Error('Invalid cookie domain')
+ }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
+ * @param {number|Date} date
+ IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT
+ ; fixed length/zone/capitalization subset of the format
+ ; see Section 3.3 of [RFC5322]
+
+ day-name = %x4D.6F.6E ; "Mon", case-sensitive
+ / %x54.75.65 ; "Tue", case-sensitive
+ / %x57.65.64 ; "Wed", case-sensitive
+ / %x54.68.75 ; "Thu", case-sensitive
+ / %x46.72.69 ; "Fri", case-sensitive
+ / %x53.61.74 ; "Sat", case-sensitive
+ / %x53.75.6E ; "Sun", case-sensitive
+ date1 = day SP month SP year
+ ; e.g., 02 Jun 1982
+
+ day = 2DIGIT
+ month = %x4A.61.6E ; "Jan", case-sensitive
+ / %x46.65.62 ; "Feb", case-sensitive
+ / %x4D.61.72 ; "Mar", case-sensitive
+ / %x41.70.72 ; "Apr", case-sensitive
+ / %x4D.61.79 ; "May", case-sensitive
+ / %x4A.75.6E ; "Jun", case-sensitive
+ / %x4A.75.6C ; "Jul", case-sensitive
+ / %x41.75.67 ; "Aug", case-sensitive
+ / %x53.65.70 ; "Sep", case-sensitive
+ / %x4F.63.74 ; "Oct", case-sensitive
+ / %x4E.6F.76 ; "Nov", case-sensitive
+ / %x44.65.63 ; "Dec", case-sensitive
+ year = 4DIGIT
+
+ GMT = %x47.4D.54 ; "GMT", case-sensitive
+
+ time-of-day = hour ":" minute ":" second
+ ; 00:00:00 - 23:59:60 (leap second)
+
+ hour = 2DIGIT
+ minute = 2DIGIT
+ second = 2DIGIT
+ */
+function toIMFDate (date) {
+ if (typeof date === 'number') {
+ date = new Date(date)
+ }
+
+ const days = [
+ 'Sun', 'Mon', 'Tue', 'Wed',
+ 'Thu', 'Fri', 'Sat'
+ ]
+
+ const months = [
+ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
+ ]
+
+ const dayName = days[date.getUTCDay()]
+ const day = date.getUTCDate().toString().padStart(2, '0')
+ const month = months[date.getUTCMonth()]
+ const year = date.getUTCFullYear()
+ const hour = date.getUTCHours().toString().padStart(2, '0')
+ const minute = date.getUTCMinutes().toString().padStart(2, '0')
+ const second = date.getUTCSeconds().toString().padStart(2, '0')
+
+ return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
+}
+
+/**
+ max-age-av = "Max-Age=" non-zero-digit *DIGIT
+ ; In practice, both expires-av and max-age-av
+ ; are limited to dates representable by the
+ ; user agent.
+ * @param {number} maxAge
+ */
+function validateCookieMaxAge (maxAge) {
+ if (maxAge < 0) {
+ throw new Error('Invalid cookie max-age')
+ }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
+ * @param {import('./index').Cookie} cookie
+ */
+function stringify (cookie) {
+ if (cookie.name.length === 0) {
+ return null
+ }
+
+ validateCookieName(cookie.name)
+ validateCookieValue(cookie.value)
+
+ const out = [`${cookie.name}=${cookie.value}`]
+
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
+ // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
+ if (cookie.name.startsWith('__Secure-')) {
+ cookie.secure = true
+ }
+
+ if (cookie.name.startsWith('__Host-')) {
+ cookie.secure = true
+ cookie.domain = null
+ cookie.path = '/'
+ }
+
+ if (cookie.secure) {
+ out.push('Secure')
+ }
+
+ if (cookie.httpOnly) {
+ out.push('HttpOnly')
+ }
+
+ if (typeof cookie.maxAge === 'number') {
+ validateCookieMaxAge(cookie.maxAge)
+ out.push(`Max-Age=${cookie.maxAge}`)
+ }
+
+ if (cookie.domain) {
+ validateCookieDomain(cookie.domain)
+ out.push(`Domain=${cookie.domain}`)
+ }
+
+ if (cookie.path) {
+ validateCookiePath(cookie.path)
+ out.push(`Path=${cookie.path}`)
+ }
+
+ if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
+ out.push(`Expires=${toIMFDate(cookie.expires)}`)
+ }
+
+ if (cookie.sameSite) {
+ out.push(`SameSite=${cookie.sameSite}`)
+ }
+
+ for (const part of cookie.unparsed) {
+ if (!part.includes('=')) {
+ throw new Error('Invalid unparsed')
+ }
+
+ const [key, ...value] = part.split('=')
+
+ out.push(`${key.trim()}=${value.join('=')}`)
+ }
+
+ return out.join('; ')
+}
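+
+// Illustrative only, never called: the Set-Cookie string produced by
+// stringify() above for a sample cookie, including the IMF-fixdate form
+// generated by toIMFDate().
+function exampleStringify () {
+  return stringify({
+    name: 'sid',
+    value: 'abc123',
+    path: '/',
+    secure: true,
+    httpOnly: true,
+    sameSite: 'Lax',
+    expires: new Date(Date.UTC(1982, 5, 2)),
+    unparsed: []
+  })
+  // => 'sid=abc123; Secure; HttpOnly; Path=/; Expires=Wed, 02 Jun 1982 00:00:00 GMT; SameSite=Lax'
+}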
+
+let kHeadersListNode
+
+function getHeadersList (headers) {
+ if (headers[kHeadersList]) {
+ return headers[kHeadersList]
+ }
+
+ if (!kHeadersListNode) {
+ kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
+ (symbol) => symbol.description === 'headers list'
+ )
+
+ assert(kHeadersListNode, 'Headers cannot be parsed')
+ }
+
+ const headersList = headers[kHeadersListNode]
+ assert(headersList)
+
+ return headersList
+}
+
+module.exports = {
+ isCTLExcludingHtab,
+ stringify,
+ getHeadersList
+}
+
+
+/***/ }),
+
+/***/ 2067:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const net = __nccwpck_require__(1808)
+const assert = __nccwpck_require__(9491)
+const util = __nccwpck_require__(3983)
+const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8045)
+
+let tls // include tls conditionally since it is not always available
+
+// TODO: session re-use does not wait for the first
+// connection to resolve the session and might therefore
+// resolve the same servername multiple times even when
+// re-use is enabled.
+
+let SessionCache
+// FIXME: remove workaround when the Node bug is fixed
+// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
+ SessionCache = class WeakSessionCache {
+ constructor (maxCachedSessions) {
+ this._maxCachedSessions = maxCachedSessions
+ this._sessionCache = new Map()
+ this._sessionRegistry = new global.FinalizationRegistry((key) => {
+ if (this._sessionCache.size < this._maxCachedSessions) {
+ return
+ }
+
+ const ref = this._sessionCache.get(key)
+ if (ref !== undefined && ref.deref() === undefined) {
+ this._sessionCache.delete(key)
+ }
+ })
+ }
+
+ get (sessionKey) {
+ const ref = this._sessionCache.get(sessionKey)
+ return ref ? ref.deref() : null
+ }
+
+ set (sessionKey, session) {
+ if (this._maxCachedSessions === 0) {
+ return
+ }
+
+ this._sessionCache.set(sessionKey, new WeakRef(session))
+ this._sessionRegistry.register(session, sessionKey)
+ }
+ }
+} else {
+ SessionCache = class SimpleSessionCache {
+ constructor (maxCachedSessions) {
+ this._maxCachedSessions = maxCachedSessions
+ this._sessionCache = new Map()
+ }
+
+ get (sessionKey) {
+ return this._sessionCache.get(sessionKey)
+ }
+
+ set (sessionKey, session) {
+ if (this._maxCachedSessions === 0) {
+ return
+ }
+
+ if (this._sessionCache.size >= this._maxCachedSessions) {
+ // remove the oldest session
+ const { value: oldestKey } = this._sessionCache.keys().next()
+ this._sessionCache.delete(oldestKey)
+ }
+
+ this._sessionCache.set(sessionKey, session)
+ }
+ }
+}
+
+function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+ if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
+ throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
+ }
+
+ const options = { path: socketPath, ...opts }
+ const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
+ timeout = timeout == null ? 10e3 : timeout
+ allowH2 = allowH2 != null ? allowH2 : false
+ return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
+ let socket
+ if (protocol === 'https:') {
+ if (!tls) {
+ tls = __nccwpck_require__(4404)
+ }
+ servername = servername || options.servername || util.getServerName(host) || null
+
+ const sessionKey = servername || hostname
+ const session = sessionCache.get(sessionKey) || null
+
+ assert(sessionKey)
+
+ socket = tls.connect({
+ highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
+ ...options,
+ servername,
+ session,
+ localAddress,
+ // TODO(HTTP/2): Add support for h2c
+ ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
+ socket: httpSocket, // upgrade socket connection
+ port: port || 443,
+ host: hostname
+ })
+
+ socket
+ .on('session', function (session) {
+ // TODO (fix): Can a session become invalid once established? Don't think so?
+ sessionCache.set(sessionKey, session)
+ })
+ } else {
+      assert(!httpSocket, 'httpSocket can only be sent on TLS upgrade')
+ socket = net.connect({
+ highWaterMark: 64 * 1024, // Same as nodejs fs streams.
+ ...options,
+ localAddress,
+ port: port || 80,
+ host: hostname
+ })
+ }
+
+    // Set TCP keep-alive options on the socket here instead of in connect() to also cover the case where an existing socket is assigned
+ if (options.keepAlive == null || options.keepAlive) {
+ const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
+ socket.setKeepAlive(true, keepAliveInitialDelay)
+ }
+
+ const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
+
+ socket
+ .setNoDelay(true)
+ .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
+ cancelTimeout()
+
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb(null, this)
+ }
+ })
+ .on('error', function (err) {
+ cancelTimeout()
+
+ if (callback) {
+ const cb = callback
+ callback = null
+ cb(err)
+ }
+ })
+
+ return socket
+ }
+}
+
+function setupTimeout (onConnectTimeout, timeout) {
+ if (!timeout) {
+ return () => {}
+ }
+
+ let s1 = null
+ let s2 = null
+ const timeoutId = setTimeout(() => {
+    // setImmediate is added to make sure that we prioritise socket error events over timeouts
+ s1 = setImmediate(() => {
+ if (process.platform === 'win32') {
+ // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
+ s2 = setImmediate(() => onConnectTimeout())
+ } else {
+ onConnectTimeout()
+ }
+ })
+ }, timeout)
+ return () => {
+ clearTimeout(timeoutId)
+ clearImmediate(s1)
+ clearImmediate(s2)
+ }
+}
+
+function onConnectTimeout (socket) {
+ util.destroy(socket, new ConnectTimeoutError())
+}
+
+module.exports = buildConnector
+
+
+/***/ }),
+
+/***/ 8045:
+/***/ ((module) => {
+
+"use strict";
+
+
+class UndiciError extends Error {
+ constructor (message) {
+ super(message)
+ this.name = 'UndiciError'
+ this.code = 'UND_ERR'
+ }
+}
+
+class ConnectTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ConnectTimeoutError)
+ this.name = 'ConnectTimeoutError'
+ this.message = message || 'Connect Timeout Error'
+ this.code = 'UND_ERR_CONNECT_TIMEOUT'
+ }
+}
+
+class HeadersTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, HeadersTimeoutError)
+ this.name = 'HeadersTimeoutError'
+ this.message = message || 'Headers Timeout Error'
+ this.code = 'UND_ERR_HEADERS_TIMEOUT'
+ }
+}
+
+class HeadersOverflowError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, HeadersOverflowError)
+ this.name = 'HeadersOverflowError'
+ this.message = message || 'Headers Overflow Error'
+ this.code = 'UND_ERR_HEADERS_OVERFLOW'
+ }
+}
+
+class BodyTimeoutError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, BodyTimeoutError)
+ this.name = 'BodyTimeoutError'
+ this.message = message || 'Body Timeout Error'
+ this.code = 'UND_ERR_BODY_TIMEOUT'
+ }
+}
+
+class ResponseStatusCodeError extends UndiciError {
+ constructor (message, statusCode, headers, body) {
+ super(message)
+ Error.captureStackTrace(this, ResponseStatusCodeError)
+ this.name = 'ResponseStatusCodeError'
+ this.message = message || 'Response Status Code Error'
+ this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
+ this.body = body
+ this.status = statusCode
+ this.statusCode = statusCode
+ this.headers = headers
+ }
+}
+
+class InvalidArgumentError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InvalidArgumentError)
+ this.name = 'InvalidArgumentError'
+ this.message = message || 'Invalid Argument Error'
+ this.code = 'UND_ERR_INVALID_ARG'
+ }
+}
+
+class InvalidReturnValueError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InvalidReturnValueError)
+ this.name = 'InvalidReturnValueError'
+ this.message = message || 'Invalid Return Value Error'
+ this.code = 'UND_ERR_INVALID_RETURN_VALUE'
+ }
+}
+
+class RequestAbortedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, RequestAbortedError)
+ this.name = 'AbortError'
+ this.message = message || 'Request aborted'
+ this.code = 'UND_ERR_ABORTED'
+ }
+}
+
+class InformationalError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, InformationalError)
+ this.name = 'InformationalError'
+ this.message = message || 'Request information'
+ this.code = 'UND_ERR_INFO'
+ }
+}
+
+class RequestContentLengthMismatchError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, RequestContentLengthMismatchError)
+ this.name = 'RequestContentLengthMismatchError'
+ this.message = message || 'Request body length does not match content-length header'
+ this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
+ }
+}
+
+class ResponseContentLengthMismatchError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ResponseContentLengthMismatchError)
+ this.name = 'ResponseContentLengthMismatchError'
+ this.message = message || 'Response body length does not match content-length header'
+ this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
+ }
+}
+
+class ClientDestroyedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ClientDestroyedError)
+ this.name = 'ClientDestroyedError'
+ this.message = message || 'The client is destroyed'
+ this.code = 'UND_ERR_DESTROYED'
+ }
+}
+
+class ClientClosedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ClientClosedError)
+ this.name = 'ClientClosedError'
+ this.message = message || 'The client is closed'
+ this.code = 'UND_ERR_CLOSED'
+ }
+}
+
+class SocketError extends UndiciError {
+ constructor (message, socket) {
+ super(message)
+ Error.captureStackTrace(this, SocketError)
+ this.name = 'SocketError'
+ this.message = message || 'Socket error'
+ this.code = 'UND_ERR_SOCKET'
+ this.socket = socket
+ }
+}
+
+class NotSupportedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, NotSupportedError)
+ this.name = 'NotSupportedError'
+ this.message = message || 'Not supported error'
+ this.code = 'UND_ERR_NOT_SUPPORTED'
+ }
+}
+
+class BalancedPoolMissingUpstreamError extends UndiciError {
+ constructor (message) {
+ super(message)
+    Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
+ this.name = 'MissingUpstreamError'
+ this.message = message || 'No upstream has been added to the BalancedPool'
+ this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
+ }
+}
+
+class HTTPParserError extends Error {
+ constructor (message, code, data) {
+ super(message)
+ Error.captureStackTrace(this, HTTPParserError)
+ this.name = 'HTTPParserError'
+ this.code = code ? `HPE_${code}` : undefined
+ this.data = data ? data.toString() : undefined
+ }
+}
+
+class ResponseExceededMaxSizeError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, ResponseExceededMaxSizeError)
+ this.name = 'ResponseExceededMaxSizeError'
+ this.message = message || 'Response content exceeded max size'
+ this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
+ }
+}
+
+class RequestRetryError extends UndiciError {
+ constructor (message, code, { headers, data }) {
+ super(message)
+ Error.captureStackTrace(this, RequestRetryError)
+ this.name = 'RequestRetryError'
+ this.message = message || 'Request retry error'
+ this.code = 'UND_ERR_REQ_RETRY'
+ this.statusCode = code
+ this.data = data
+ this.headers = headers
+ }
+}
+
+module.exports = {
+ HTTPParserError,
+ UndiciError,
+ HeadersTimeoutError,
+ HeadersOverflowError,
+ BodyTimeoutError,
+ RequestContentLengthMismatchError,
+ ConnectTimeoutError,
+ ResponseStatusCodeError,
+ InvalidArgumentError,
+ InvalidReturnValueError,
+ RequestAbortedError,
+ ClientDestroyedError,
+ ClientClosedError,
+ InformationalError,
+ SocketError,
+ NotSupportedError,
+ ResponseContentLengthMismatchError,
+ BalancedPoolMissingUpstreamError,
+ ResponseExceededMaxSizeError,
+ RequestRetryError
+}
+
+
+/***/ }),
+
+/***/ 2905:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ InvalidArgumentError,
+ NotSupportedError
+} = __nccwpck_require__(8045)
+const assert = __nccwpck_require__(9491)
+const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785)
+const util = __nccwpck_require__(3983)
+
+// tokenRegExp and headerCharRegex have been lifted from
+// https://github.com/nodejs/node/blob/main/lib/_http_common.js
+
+/**
+ * Verifies that the given val is a valid HTTP token
+ * per the rules defined in RFC 7230
+ * See https://tools.ietf.org/html/rfc7230#section-3.2.6
+ */
+const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
+
+/**
+ * Matches if val contains an invalid field-vchar
+ * field-value = *( field-content / obs-fold )
+ * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+ * field-vchar = VCHAR / obs-text
+ */
+const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+// Verifies that a given path is valid and does not contain control chars \x00 to \x20
+const invalidPathRegex = /[^\u0021-\u00ff]/
+
+const kHandler = Symbol('handler')
+
+const channels = {}
+
+let extractBody
+
+try {
+ const diagnosticsChannel = __nccwpck_require__(7643)
+ channels.create = diagnosticsChannel.channel('undici:request:create')
+ channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
+ channels.headers = diagnosticsChannel.channel('undici:request:headers')
+ channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
+ channels.error = diagnosticsChannel.channel('undici:request:error')
+} catch {
+ channels.create = { hasSubscribers: false }
+ channels.bodySent = { hasSubscribers: false }
+ channels.headers = { hasSubscribers: false }
+ channels.trailers = { hasSubscribers: false }
+ channels.error = { hasSubscribers: false }
+}
+
+class Request {
+ constructor (origin, {
+ path,
+ method,
+ body,
+ headers,
+ query,
+ idempotent,
+ blocking,
+ upgrade,
+ headersTimeout,
+ bodyTimeout,
+ reset,
+ throwOnError,
+ expectContinue
+ }, handler) {
+ if (typeof path !== 'string') {
+ throw new InvalidArgumentError('path must be a string')
+ } else if (
+ path[0] !== '/' &&
+ !(path.startsWith('http://') || path.startsWith('https://')) &&
+ method !== 'CONNECT'
+ ) {
+ throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
+ } else if (invalidPathRegex.exec(path) !== null) {
+ throw new InvalidArgumentError('invalid request path')
+ }
+
+ if (typeof method !== 'string') {
+ throw new InvalidArgumentError('method must be a string')
+ } else if (tokenRegExp.exec(method) === null) {
+ throw new InvalidArgumentError('invalid request method')
+ }
+
+ if (upgrade && typeof upgrade !== 'string') {
+ throw new InvalidArgumentError('upgrade must be a string')
+ }
+
+ if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
+ throw new InvalidArgumentError('invalid headersTimeout')
+ }
+
+ if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
+ throw new InvalidArgumentError('invalid bodyTimeout')
+ }
+
+ if (reset != null && typeof reset !== 'boolean') {
+ throw new InvalidArgumentError('invalid reset')
+ }
+
+ if (expectContinue != null && typeof expectContinue !== 'boolean') {
+ throw new InvalidArgumentError('invalid expectContinue')
+ }
+
+ this.headersTimeout = headersTimeout
+
+ this.bodyTimeout = bodyTimeout
+
+ this.throwOnError = throwOnError === true
+
+ this.method = method
+
+ this.abort = null
+
+ if (body == null) {
+ this.body = null
+ } else if (util.isStream(body)) {
+ this.body = body
+
+ const rState = this.body._readableState
+ if (!rState || !rState.autoDestroy) {
+ this.endHandler = function autoDestroy () {
+ util.destroy(this)
+ }
+ this.body.on('end', this.endHandler)
+ }
+
+ this.errorHandler = err => {
+ if (this.abort) {
+ this.abort(err)
+ } else {
+ this.error = err
+ }
+ }
+ this.body.on('error', this.errorHandler)
+ } else if (util.isBuffer(body)) {
+ this.body = body.byteLength ? body : null
+ } else if (ArrayBuffer.isView(body)) {
+ this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
+ } else if (body instanceof ArrayBuffer) {
+ this.body = body.byteLength ? Buffer.from(body) : null
+ } else if (typeof body === 'string') {
+ this.body = body.length ? Buffer.from(body) : null
+ } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
+ this.body = body
+ } else {
+ throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
+ }
+
+ this.completed = false
+
+ this.aborted = false
+
+ this.upgrade = upgrade || null
+
+ this.path = query ? util.buildURL(path, query) : path
+
+ this.origin = origin
+
+ this.idempotent = idempotent == null
+ ? method === 'HEAD' || method === 'GET'
+ : idempotent
+
+ this.blocking = blocking == null ? false : blocking
+
+ this.reset = reset == null ? null : reset
+
+ this.host = null
+
+ this.contentLength = null
+
+ this.contentType = null
+
+ this.headers = ''
+
+ // Only for H2
+ this.expectContinue = expectContinue != null ? expectContinue : false
+
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError('headers array must be even')
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(this, headers[i], headers[i + 1])
+ }
+ } else if (headers && typeof headers === 'object') {
+ const keys = Object.keys(headers)
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i]
+ processHeader(this, key, headers[key])
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError('headers must be an object or an array')
+ }
+
+ if (util.isFormDataLike(this.body)) {
+ if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
+ throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
+ }
+
+ if (!extractBody) {
+ extractBody = (__nccwpck_require__(1472).extractBody)
+ }
+
+ const [bodyStream, contentType] = extractBody(body)
+ if (this.contentType == null) {
+ this.contentType = contentType
+ this.headers += `content-type: ${contentType}\r\n`
+ }
+ this.body = bodyStream.stream
+ this.contentLength = bodyStream.length
+ } else if (util.isBlobLike(body) && this.contentType == null && body.type) {
+ this.contentType = body.type
+ this.headers += `content-type: ${body.type}\r\n`
+ }
+
+ util.validateHandler(handler, method, upgrade)
+
+ this.servername = util.getServerName(this.host)
+
+ this[kHandler] = handler
+
+ if (channels.create.hasSubscribers) {
+ channels.create.publish({ request: this })
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this[kHandler].onBodySent) {
+ try {
+ return this[kHandler].onBodySent(chunk)
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+ }
+
+ onRequestSent () {
+ if (channels.bodySent.hasSubscribers) {
+ channels.bodySent.publish({ request: this })
+ }
+
+ if (this[kHandler].onRequestSent) {
+ try {
+ return this[kHandler].onRequestSent()
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+ }
+
+ onConnect (abort) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ if (this.error) {
+ abort(this.error)
+ } else {
+ this.abort = abort
+ return this[kHandler].onConnect(abort)
+ }
+ }
+
+ onHeaders (statusCode, headers, resume, statusText) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ if (channels.headers.hasSubscribers) {
+ channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
+ }
+
+ try {
+ return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
+ } catch (err) {
+ this.abort(err)
+ }
+ }
+
+ onData (chunk) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ try {
+ return this[kHandler].onData(chunk)
+ } catch (err) {
+ this.abort(err)
+ return false
+ }
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ assert(!this.aborted)
+ assert(!this.completed)
+
+ return this[kHandler].onUpgrade(statusCode, headers, socket)
+ }
+
+ onComplete (trailers) {
+ this.onFinally()
+
+ assert(!this.aborted)
+
+ this.completed = true
+ if (channels.trailers.hasSubscribers) {
+ channels.trailers.publish({ request: this, trailers })
+ }
+
+ try {
+ return this[kHandler].onComplete(trailers)
+ } catch (err) {
+ // TODO (fix): This might be a bad idea?
+ this.onError(err)
+ }
+ }
+
+ onError (error) {
+ this.onFinally()
+
+ if (channels.error.hasSubscribers) {
+ channels.error.publish({ request: this, error })
+ }
+
+ if (this.aborted) {
+ return
+ }
+ this.aborted = true
+
+ return this[kHandler].onError(error)
+ }
+
+ onFinally () {
+ if (this.errorHandler) {
+ this.body.off('error', this.errorHandler)
+ this.errorHandler = null
+ }
+
+ if (this.endHandler) {
+ this.body.off('end', this.endHandler)
+ this.endHandler = null
+ }
+ }
+
+ // TODO: adjust to support H2
+ addHeader (key, value) {
+ processHeader(this, key, value)
+ return this
+ }
+
+ static [kHTTP1BuildRequest] (origin, opts, handler) {
+ // TODO: Migrate header parsing here, to make Requests
+ // HTTP agnostic
+ return new Request(origin, opts, handler)
+ }
+
+ static [kHTTP2BuildRequest] (origin, opts, handler) {
+ const headers = opts.headers
+ opts = { ...opts, headers: null }
+
+ const request = new Request(origin, opts, handler)
+
+ request.headers = {}
+
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError('headers array must be even')
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(request, headers[i], headers[i + 1], true)
+ }
+ } else if (headers && typeof headers === 'object') {
+ const keys = Object.keys(headers)
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i]
+ processHeader(request, key, headers[key], true)
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError('headers must be an object or an array')
+ }
+
+ return request
+ }
+
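+  // Convert a raw "key: value\r\n" header string into an object, joining values for duplicate keys with commas.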
+ static [kHTTP2CopyHeaders] (raw) {
+ const rawHeaders = raw.split('\r\n')
+ const headers = {}
+
+ for (const header of rawHeaders) {
+ const [key, value] = header.split(': ')
+
+ if (value == null || value.length === 0) continue
+
+ if (headers[key]) headers[key] += `,${value}`
+ else headers[key] = value
+ }
+
+ return headers
+ }
+}
+
+function processHeaderValue (key, val, skipAppend) {
+ if (val && typeof val === 'object') {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+
+ val = val != null ? `${val}` : ''
+
+ if (headerCharRegex.exec(val) !== null) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+
+ return skipAppend ? val : `${key}: ${val}\r\n`
+}
+
+function processHeader (request, key, val, skipAppend = false) {
+ if (val && (typeof val === 'object' && !Array.isArray(val))) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ } else if (val === undefined) {
+ return
+ }
+
+ if (
+ request.host === null &&
+ key.length === 4 &&
+ key.toLowerCase() === 'host'
+ ) {
+ if (headerCharRegex.exec(val) !== null) {
+ throw new InvalidArgumentError(`invalid ${key} header`)
+ }
+ // Consumed by Client
+ request.host = val
+ } else if (
+ request.contentLength === null &&
+ key.length === 14 &&
+ key.toLowerCase() === 'content-length'
+ ) {
+ request.contentLength = parseInt(val, 10)
+ if (!Number.isFinite(request.contentLength)) {
+ throw new InvalidArgumentError('invalid content-length header')
+ }
+ } else if (
+ request.contentType === null &&
+ key.length === 12 &&
+ key.toLowerCase() === 'content-type'
+ ) {
+ request.contentType = val
+ if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+ else request.headers += processHeaderValue(key, val)
+ } else if (
+ key.length === 17 &&
+ key.toLowerCase() === 'transfer-encoding'
+ ) {
+ throw new InvalidArgumentError('invalid transfer-encoding header')
+ } else if (
+ key.length === 10 &&
+ key.toLowerCase() === 'connection'
+ ) {
+ const value = typeof val === 'string' ? val.toLowerCase() : null
+ if (value !== 'close' && value !== 'keep-alive') {
+ throw new InvalidArgumentError('invalid connection header')
+ } else if (value === 'close') {
+ request.reset = true
+ }
+ } else if (
+ key.length === 10 &&
+ key.toLowerCase() === 'keep-alive'
+ ) {
+ throw new InvalidArgumentError('invalid keep-alive header')
+ } else if (
+ key.length === 7 &&
+ key.toLowerCase() === 'upgrade'
+ ) {
+ throw new InvalidArgumentError('invalid upgrade header')
+ } else if (
+ key.length === 6 &&
+ key.toLowerCase() === 'expect'
+ ) {
+ throw new NotSupportedError('expect header not supported')
+ } else if (tokenRegExp.exec(key) === null) {
+ throw new InvalidArgumentError('invalid header key')
+ } else {
+ if (Array.isArray(val)) {
+ for (let i = 0; i < val.length; i++) {
+ if (skipAppend) {
+ if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
+ else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
+ } else {
+ request.headers += processHeaderValue(key, val[i])
+ }
+ }
+ } else {
+ if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+ else request.headers += processHeaderValue(key, val)
+ }
+ }
+}
+
+module.exports = Request
+
+
+/***/ }),
+
+/***/ 2785:
+/***/ ((module) => {
+
+module.exports = {
+ kClose: Symbol('close'),
+ kDestroy: Symbol('destroy'),
+ kDispatch: Symbol('dispatch'),
+ kUrl: Symbol('url'),
+ kWriting: Symbol('writing'),
+ kResuming: Symbol('resuming'),
+ kQueue: Symbol('queue'),
+ kConnect: Symbol('connect'),
+ kConnecting: Symbol('connecting'),
+ kHeadersList: Symbol('headers list'),
+ kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
+ kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
+ kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
+ kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
+ kKeepAlive: Symbol('keep alive'),
+ kHeadersTimeout: Symbol('headers timeout'),
+ kBodyTimeout: Symbol('body timeout'),
+ kServerName: Symbol('server name'),
+ kLocalAddress: Symbol('local address'),
+ kHost: Symbol('host'),
+ kNoRef: Symbol('no ref'),
+ kBodyUsed: Symbol('used'),
+ kRunning: Symbol('running'),
+ kBlocking: Symbol('blocking'),
+ kPending: Symbol('pending'),
+ kSize: Symbol('size'),
+ kBusy: Symbol('busy'),
+ kQueued: Symbol('queued'),
+ kFree: Symbol('free'),
+ kConnected: Symbol('connected'),
+ kClosed: Symbol('closed'),
+ kNeedDrain: Symbol('need drain'),
+ kReset: Symbol('reset'),
+ kDestroyed: Symbol.for('nodejs.stream.destroyed'),
+ kMaxHeadersSize: Symbol('max headers size'),
+ kRunningIdx: Symbol('running index'),
+ kPendingIdx: Symbol('pending index'),
+ kError: Symbol('error'),
+ kClients: Symbol('clients'),
+ kClient: Symbol('client'),
+ kParser: Symbol('parser'),
+ kOnDestroyed: Symbol('destroy callbacks'),
+ kPipelining: Symbol('pipelining'),
+ kSocket: Symbol('socket'),
+ kHostHeader: Symbol('host header'),
+ kConnector: Symbol('connector'),
+ kStrictContentLength: Symbol('strict content length'),
+ kMaxRedirections: Symbol('maxRedirections'),
+ kMaxRequests: Symbol('maxRequestsPerClient'),
+ kProxy: Symbol('proxy agent options'),
+ kCounter: Symbol('socket request counter'),
+ kInterceptors: Symbol('dispatch interceptors'),
+ kMaxResponseSize: Symbol('max response size'),
+ kHTTP2Session: Symbol('http2Session'),
+ kHTTP2SessionState: Symbol('http2Session state'),
+ kHTTP2BuildRequest: Symbol('http2 build request'),
+ kHTTP1BuildRequest: Symbol('http1 build request'),
+ kHTTP2CopyHeaders: Symbol('http2 copy headers'),
+ kHTTPConnVersion: Symbol('http connection version'),
+ kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
+ kConstruct: Symbol('constructable')
+}
+
+
+/***/ }),
+
+/***/ 3983:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const assert = __nccwpck_require__(9491)
+const { kDestroyed, kBodyUsed } = __nccwpck_require__(2785)
+const { IncomingMessage } = __nccwpck_require__(3685)
+const stream = __nccwpck_require__(2781)
+const net = __nccwpck_require__(1808)
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+const { Blob } = __nccwpck_require__(4300)
+const nodeUtil = __nccwpck_require__(3837)
+const { stringify } = __nccwpck_require__(3477)
+
+const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
+
+function nop () {}
+
+function isStream (obj) {
+ return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
+}
+
+// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
+function isBlobLike (object) {
+ return (Blob && object instanceof Blob) || (
+ object &&
+ typeof object === 'object' &&
+ (typeof object.stream === 'function' ||
+ typeof object.arrayBuffer === 'function') &&
+ /^(Blob|File)$/.test(object[Symbol.toStringTag])
+ )
+}
+
+function buildURL (url, queryParams) {
+ if (url.includes('?') || url.includes('#')) {
+ throw new Error('Query params cannot be passed when url already contains "?" or "#".')
+ }
+
+ const stringified = stringify(queryParams)
+
+ if (stringified) {
+ url += '?' + stringified
+ }
+
+ return url
+}
+
+function parseURL (url) {
+ if (typeof url === 'string') {
+ url = new URL(url)
+
+ if (!/^https?:/.test(url.origin || url.protocol)) {
+ throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+ }
+
+ return url
+ }
+
+ if (!url || typeof url !== 'object') {
+ throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
+ }
+
+ if (!/^https?:/.test(url.origin || url.protocol)) {
+ throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+ }
+
+ if (!(url instanceof URL)) {
+ if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
+ throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
+ }
+
+ if (url.path != null && typeof url.path !== 'string') {
+ throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
+ }
+
+ if (url.pathname != null && typeof url.pathname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
+ }
+
+ if (url.hostname != null && typeof url.hostname !== 'string') {
+ throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
+ }
+
+ if (url.origin != null && typeof url.origin !== 'string') {
+ throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
+ }
+
+ const port = url.port != null
+ ? url.port
+ : (url.protocol === 'https:' ? 443 : 80)
+ let origin = url.origin != null
+ ? url.origin
+ : `${url.protocol}//${url.hostname}:${port}`
+ let path = url.path != null
+ ? url.path
+ : `${url.pathname || ''}${url.search || ''}`
+
+ if (origin.endsWith('/')) {
+ origin = origin.substring(0, origin.length - 1)
+ }
+
+ if (path && !path.startsWith('/')) {
+ path = `/${path}`
+ }
+ // new URL(path, origin) is unsafe when `path` contains an absolute URL
+ // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
+ // If first parameter is a relative URL, second param is required, and will be used as the base URL.
+ // If first parameter is an absolute URL, a given second param will be ignored.
+ url = new URL(origin + path)
+ }
+
+ return url
+}
+
+function parseOrigin (url) {
+ url = parseURL(url)
+
+ if (url.pathname !== '/' || url.search || url.hash) {
+ throw new InvalidArgumentError('invalid url')
+ }
+
+ return url
+}
+
+function getHostname (host) {
+ if (host[0] === '[') {
+ const idx = host.indexOf(']')
+
+ assert(idx !== -1)
+ return host.substring(1, idx)
+ }
+
+ const idx = host.indexOf(':')
+ if (idx === -1) return host
+
+ return host.substring(0, idx)
+}
+
+// IP addresses are not valid server names per RFC6066
+// > Currently, the only server names supported are DNS hostnames
+function getServerName (host) {
+ if (!host) {
+ return null
+ }
+
+ assert.strictEqual(typeof host, 'string')
+
+ const servername = getHostname(host)
+ if (net.isIP(servername)) {
+ return ''
+ }
+
+ return servername
+}
+
+function deepClone (obj) {
+ return JSON.parse(JSON.stringify(obj))
+}
+
+function isAsyncIterable (obj) {
+ return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
+}
+
+function isIterable (obj) {
+ return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
+}
+
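+// Best-effort body length in bytes: 0 for no body; known stream (only once ended), blob, or buffer sizes; otherwise null (unknown).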
+function bodyLength (body) {
+ if (body == null) {
+ return 0
+ } else if (isStream(body)) {
+ const state = body._readableState
+ return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
+ ? state.length
+ : null
+ } else if (isBlobLike(body)) {
+ return body.size != null ? body.size : null
+ } else if (isBuffer(body)) {
+ return body.byteLength
+ }
+
+ return null
+}
+
+function isDestroyed (stream) {
+ return !stream || !!(stream.destroyed || stream[kDestroyed])
+}
+
+function isReadableAborted (stream) {
+ const state = stream && stream._readableState
+ return isDestroyed(stream) && state && !state.endEmitted
+}
+
+function destroy (stream, err) {
+ if (stream == null || !isStream(stream) || isDestroyed(stream)) {
+ return
+ }
+
+ if (typeof stream.destroy === 'function') {
+ if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
+ // See: https://github.com/nodejs/node/pull/38505/files
+ stream.socket = null
+ }
+
+ stream.destroy(err)
+ } else if (err) {
+ process.nextTick((stream, err) => {
+ stream.emit('error', err)
+ }, stream, err)
+ }
+
+ if (stream.destroyed !== true) {
+ stream[kDestroyed] = true
+ }
+}
+
+const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
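+// Extract the "timeout" parameter (in seconds) from a Keep-Alive header value and convert it to milliseconds.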
+function parseKeepAliveTimeout (val) {
+ const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
+ return m ? parseInt(m[1], 10) * 1000 : null
+}
+
+function parseHeaders (headers, obj = {}) {
+ // For H2 support
+ if (!Array.isArray(headers)) return headers
+
+ for (let i = 0; i < headers.length; i += 2) {
+ const key = headers[i].toString().toLowerCase()
+ let val = obj[key]
+
+ if (!val) {
+ if (Array.isArray(headers[i + 1])) {
+ obj[key] = headers[i + 1].map(x => x.toString('utf8'))
+ } else {
+ obj[key] = headers[i + 1].toString('utf8')
+ }
+ } else {
+ if (!Array.isArray(val)) {
+ val = [val]
+ obj[key] = val
+ }
+ val.push(headers[i + 1].toString('utf8'))
+ }
+ }
+
+ // See https://github.com/nodejs/node/pull/46528
+ if ('content-length' in obj && 'content-disposition' in obj) {
+ obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
+ }
+
+ return obj
+}
+
+function parseRawHeaders (headers) {
+ const ret = []
+ let hasContentLength = false
+ let contentDispositionIdx = -1
+
+ for (let n = 0; n < headers.length; n += 2) {
+ const key = headers[n + 0].toString()
+ const val = headers[n + 1].toString('utf8')
+
+ if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
+ ret.push(key, val)
+ hasContentLength = true
+ } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
+ contentDispositionIdx = ret.push(key, val) - 1
+ } else {
+ ret.push(key, val)
+ }
+ }
+
+ // See https://github.com/nodejs/node/pull/46528
+ if (hasContentLength && contentDispositionIdx !== -1) {
+ ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
+ }
+
+ return ret
+}
+
+function isBuffer (buffer) {
+ // See, https://github.com/mcollina/undici/pull/319
+ return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
+}
+
+function validateHandler (handler, method, upgrade) {
+ if (!handler || typeof handler !== 'object') {
+ throw new InvalidArgumentError('handler must be an object')
+ }
+
+ if (typeof handler.onConnect !== 'function') {
+ throw new InvalidArgumentError('invalid onConnect method')
+ }
+
+ if (typeof handler.onError !== 'function') {
+ throw new InvalidArgumentError('invalid onError method')
+ }
+
+ if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
+ throw new InvalidArgumentError('invalid onBodySent method')
+ }
+
+ if (upgrade || method === 'CONNECT') {
+ if (typeof handler.onUpgrade !== 'function') {
+ throw new InvalidArgumentError('invalid onUpgrade method')
+ }
+ } else {
+ if (typeof handler.onHeaders !== 'function') {
+ throw new InvalidArgumentError('invalid onHeaders method')
+ }
+
+ if (typeof handler.onData !== 'function') {
+ throw new InvalidArgumentError('invalid onData method')
+ }
+
+ if (typeof handler.onComplete !== 'function') {
+ throw new InvalidArgumentError('invalid onComplete method')
+ }
+ }
+}
+
+// A body is disturbed if it has been read from and it cannot
+// be re-used without losing state or data.
+function isDisturbed (body) {
+ return !!(body && (
+ stream.isDisturbed
+ ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
+ : body[kBodyUsed] ||
+ body.readableDidRead ||
+ (body._readableState && body._readableState.dataEmitted) ||
+ isReadableAborted(body)
+ ))
+}
+
+function isErrored (body) {
+ return !!(body && (
+ stream.isErrored
+ ? stream.isErrored(body)
+ : /state: 'errored'/.test(nodeUtil.inspect(body)
+ )))
+}
+
+function isReadable (body) {
+ return !!(body && (
+ stream.isReadable
+ ? stream.isReadable(body)
+ : /state: 'readable'/.test(nodeUtil.inspect(body)
+ )))
+}
+
+function getSocketInfo (socket) {
+ return {
+ localAddress: socket.localAddress,
+ localPort: socket.localPort,
+ remoteAddress: socket.remoteAddress,
+ remotePort: socket.remotePort,
+ remoteFamily: socket.remoteFamily,
+ timeout: socket.timeout,
+ bytesWritten: socket.bytesWritten,
+ bytesRead: socket.bytesRead
+ }
+}
+
+async function * convertIterableToBuffer (iterable) {
+ for await (const chunk of iterable) {
+ yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
+ }
+}
+
+let ReadableStream
+function ReadableStreamFrom (iterable) {
+ if (!ReadableStream) {
+ ReadableStream = (__nccwpck_require__(5356).ReadableStream)
+ }
+
+ if (ReadableStream.from) {
+ return ReadableStream.from(convertIterableToBuffer(iterable))
+ }
+
+ let iterator
+ return new ReadableStream(
+ {
+ async start () {
+ iterator = iterable[Symbol.asyncIterator]()
+ },
+ async pull (controller) {
+ const { done, value } = await iterator.next()
+ if (done) {
+ queueMicrotask(() => {
+ controller.close()
+ })
+ } else {
+ const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
+ controller.enqueue(new Uint8Array(buf))
+ }
+ return controller.desiredSize > 0
+ },
+ async cancel (reason) {
+ await iterator.return()
+ }
+ },
+ 0
+ )
+}
+
+// The argument should be a FormData instance and contain
+// all the required methods.
+function isFormDataLike (object) {
+ return (
+ object &&
+ typeof object === 'object' &&
+ typeof object.append === 'function' &&
+ typeof object.delete === 'function' &&
+ typeof object.get === 'function' &&
+ typeof object.getAll === 'function' &&
+ typeof object.has === 'function' &&
+ typeof object.set === 'function' &&
+ object[Symbol.toStringTag] === 'FormData'
+ )
+}
+
+function throwIfAborted (signal) {
+ if (!signal) { return }
+ if (typeof signal.throwIfAborted === 'function') {
+ signal.throwIfAborted()
+ } else {
+ if (signal.aborted) {
+ // DOMException not available < v17.0.0
+ const err = new Error('The operation was aborted')
+ err.name = 'AbortError'
+ throw err
+ }
+ }
+}
+
+function addAbortListener (signal, listener) {
+ if ('addEventListener' in signal) {
+ signal.addEventListener('abort', listener, { once: true })
+ return () => signal.removeEventListener('abort', listener)
+ }
+ signal.addListener('abort', listener)
+ return () => signal.removeListener('abort', listener)
+}
+
+const hasToWellFormed = !!String.prototype.toWellFormed
+
+/**
+ * @param {string} val
+ */
+function toUSVString (val) {
+ if (hasToWellFormed) {
+ return `${val}`.toWellFormed()
+ } else if (nodeUtil.toUSVString) {
+ return nodeUtil.toUSVString(val)
+ }
+
+ return `${val}`
+}
+
+// Parsed accordingly to RFC 9110
+// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
+function parseRangeHeader (range) {
+ if (range == null || range === '') return { start: 0, end: null, size: null }
+
+ const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
+ return m
+ ? {
+ start: parseInt(m[1]),
+ end: m[2] ? parseInt(m[2]) : null,
+ size: m[3] ? parseInt(m[3]) : null
+ }
+ : null
+}
+
+const kEnumerableProperty = Object.create(null)
+kEnumerableProperty.enumerable = true
+
+module.exports = {
+ kEnumerableProperty,
+ nop,
+ isDisturbed,
+ isErrored,
+ isReadable,
+ toUSVString,
+ isReadableAborted,
+ isBlobLike,
+ parseOrigin,
+ parseURL,
+ getServerName,
+ isStream,
+ isIterable,
+ isAsyncIterable,
+ isDestroyed,
+ parseRawHeaders,
+ parseHeaders,
+ parseKeepAliveTimeout,
+ destroy,
+ bodyLength,
+ deepClone,
+ ReadableStreamFrom,
+ isBuffer,
+ validateHandler,
+ getSocketInfo,
+ isFormDataLike,
+ buildURL,
+ throwIfAborted,
+ addAbortListener,
+ parseRangeHeader,
+ nodeMajor,
+ nodeMinor,
+ nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
+ safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+}
+
+
+/***/ }),
+
+/***/ 4839:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Dispatcher = __nccwpck_require__(412)
+const {
+ ClientDestroyedError,
+ ClientClosedError,
+ InvalidArgumentError
+} = __nccwpck_require__(8045)
+const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(2785)
+
+const kDestroyed = Symbol('destroyed')
+const kClosed = Symbol('closed')
+const kOnDestroyed = Symbol('onDestroyed')
+const kOnClosed = Symbol('onClosed')
+const kInterceptedDispatch = Symbol('Intercepted Dispatch')
+
+class DispatcherBase extends Dispatcher {
+ constructor () {
+ super()
+
+ this[kDestroyed] = false
+ this[kOnDestroyed] = null
+ this[kClosed] = false
+ this[kOnClosed] = []
+ }
+
+ get destroyed () {
+ return this[kDestroyed]
+ }
+
+ get closed () {
+ return this[kClosed]
+ }
+
+ get interceptors () {
+ return this[kInterceptors]
+ }
+
+ set interceptors (newInterceptors) {
+ if (newInterceptors) {
+ for (let i = newInterceptors.length - 1; i >= 0; i--) {
+        const interceptor = newInterceptors[i]
+        if (typeof interceptor !== 'function') {
+          throw new InvalidArgumentError('interceptor must be a function')
+ }
+ }
+ }
+
+ this[kInterceptors] = newInterceptors
+ }
+
+ close (callback) {
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ this.close((err, data) => {
+ return err ? reject(err) : resolve(data)
+ })
+ })
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (this[kDestroyed]) {
+ queueMicrotask(() => callback(new ClientDestroyedError(), null))
+ return
+ }
+
+ if (this[kClosed]) {
+ if (this[kOnClosed]) {
+ this[kOnClosed].push(callback)
+ } else {
+ queueMicrotask(() => callback(null, null))
+ }
+ return
+ }
+
+ this[kClosed] = true
+ this[kOnClosed].push(callback)
+
+ const onClosed = () => {
+ const callbacks = this[kOnClosed]
+ this[kOnClosed] = null
+ for (let i = 0; i < callbacks.length; i++) {
+ callbacks[i](null, null)
+ }
+ }
+
+ // Should not error.
+ this[kClose]()
+ .then(() => this.destroy())
+ .then(() => {
+ queueMicrotask(onClosed)
+ })
+ }
+
+ destroy (err, callback) {
+ if (typeof err === 'function') {
+ callback = err
+ err = null
+ }
+
+ if (callback === undefined) {
+ return new Promise((resolve, reject) => {
+ this.destroy(err, (err, data) => {
+ return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
+ })
+ })
+ }
+
+ if (typeof callback !== 'function') {
+ throw new InvalidArgumentError('invalid callback')
+ }
+
+ if (this[kDestroyed]) {
+ if (this[kOnDestroyed]) {
+ this[kOnDestroyed].push(callback)
+ } else {
+ queueMicrotask(() => callback(null, null))
+ }
+ return
+ }
+
+ if (!err) {
+ err = new ClientDestroyedError()
+ }
+
+ this[kDestroyed] = true
+ this[kOnDestroyed] = this[kOnDestroyed] || []
+ this[kOnDestroyed].push(callback)
+
+ const onDestroyed = () => {
+ const callbacks = this[kOnDestroyed]
+ this[kOnDestroyed] = null
+ for (let i = 0; i < callbacks.length; i++) {
+ callbacks[i](null, null)
+ }
+ }
+
+ // Should not error.
+ this[kDestroy](err).then(() => {
+ queueMicrotask(onDestroyed)
+ })
+ }
+
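+  // Lazily compose the interceptor chain around [kDispatch] on first use and cache the composed dispatcher.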
+ [kInterceptedDispatch] (opts, handler) {
+ if (!this[kInterceptors] || this[kInterceptors].length === 0) {
+ this[kInterceptedDispatch] = this[kDispatch]
+ return this[kDispatch](opts, handler)
+ }
+
+ let dispatch = this[kDispatch].bind(this)
+ for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
+ dispatch = this[kInterceptors][i](dispatch)
+ }
+ this[kInterceptedDispatch] = dispatch
+ return dispatch(opts, handler)
+ }
+
+ dispatch (opts, handler) {
+ if (!handler || typeof handler !== 'object') {
+ throw new InvalidArgumentError('handler must be an object')
+ }
+
+ try {
+ if (!opts || typeof opts !== 'object') {
+ throw new InvalidArgumentError('opts must be an object.')
+ }
+
+ if (this[kDestroyed] || this[kOnDestroyed]) {
+ throw new ClientDestroyedError()
+ }
+
+ if (this[kClosed]) {
+ throw new ClientClosedError()
+ }
+
+ return this[kInterceptedDispatch](opts, handler)
+ } catch (err) {
+ if (typeof handler.onError !== 'function') {
+ throw new InvalidArgumentError('invalid onError method')
+ }
+
+ handler.onError(err)
+
+ return false
+ }
+ }
+}
+
+module.exports = DispatcherBase
+
+
+/***/ }),
+
+/***/ 412:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const EventEmitter = __nccwpck_require__(2361)
+
+class Dispatcher extends EventEmitter {
+ dispatch () {
+ throw new Error('not implemented')
+ }
+
+ close () {
+ throw new Error('not implemented')
+ }
+
+ destroy () {
+ throw new Error('not implemented')
+ }
+}
+
+module.exports = Dispatcher
+
+
+/***/ }),
+
+/***/ 1472:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Busboy = __nccwpck_require__(727)
+const util = __nccwpck_require__(3983)
+const {
+ ReadableStreamFrom,
+ isBlobLike,
+ isReadableStreamLike,
+ readableStreamClose,
+ createDeferredPromise,
+ fullyReadBody
+} = __nccwpck_require__(2538)
+const { FormData } = __nccwpck_require__(2015)
+const { kState } = __nccwpck_require__(5861)
+const { webidl } = __nccwpck_require__(1744)
+const { DOMException, structuredClone } = __nccwpck_require__(1037)
+const { Blob, File: NativeFile } = __nccwpck_require__(4300)
+const { kBodyUsed } = __nccwpck_require__(2785)
+const assert = __nccwpck_require__(9491)
+const { isErrored } = __nccwpck_require__(3983)
+const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830)
+const { File: UndiciFile } = __nccwpck_require__(8511)
+const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)
+
+let ReadableStream = globalThis.ReadableStream
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+const textEncoder = new TextEncoder()
+const textDecoder = new TextDecoder()
+
+// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+function extractBody (object, keepalive = false) {
+ if (!ReadableStream) {
+ ReadableStream = (__nccwpck_require__(5356).ReadableStream)
+ }
+
+ // 1. Let stream be null.
+ let stream = null
+
+ // 2. If object is a ReadableStream object, then set stream to object.
+ if (object instanceof ReadableStream) {
+ stream = object
+ } else if (isBlobLike(object)) {
+ // 3. Otherwise, if object is a Blob object, set stream to the
+ // result of running object’s get stream.
+ stream = object.stream()
+ } else {
+ // 4. Otherwise, set stream to a new ReadableStream object, and set
+ // up stream.
+ stream = new ReadableStream({
+ async pull (controller) {
+ controller.enqueue(
+ typeof source === 'string' ? textEncoder.encode(source) : source
+ )
+ queueMicrotask(() => readableStreamClose(controller))
+ },
+ start () {},
+ type: undefined
+ })
+ }
+
+ // 5. Assert: stream is a ReadableStream object.
+ assert(isReadableStreamLike(stream))
+
+ // 6. Let action be null.
+ let action = null
+
+ // 7. Let source be null.
+ let source = null
+
+ // 8. Let length be null.
+ let length = null
+
+ // 9. Let type be null.
+ let type = null
+
+ // 10. Switch on object:
+ if (typeof object === 'string') {
+ // Set source to the UTF-8 encoding of object.
+ // Note: setting source to a Uint8Array here breaks some mocking assumptions.
+ source = object
+
+ // Set type to `text/plain;charset=UTF-8`.
+ type = 'text/plain;charset=UTF-8'
+ } else if (object instanceof URLSearchParams) {
+ // URLSearchParams
+
+ // spec says to run application/x-www-form-urlencoded on body.list
+  // this is implemented in Node.js as a part of a URLSearchParams instance's toString method
+ // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
+ // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
+
+ // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.
+ source = object.toString()
+
+ // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
+ type = 'application/x-www-form-urlencoded;charset=UTF-8'
+ } else if (isArrayBuffer(object)) {
+ // BufferSource/ArrayBuffer
+
+ // Set source to a copy of the bytes held by object.
+ source = new Uint8Array(object.slice())
+ } else if (ArrayBuffer.isView(object)) {
+ // BufferSource/ArrayBufferView
+
+ // Set source to a copy of the bytes held by object.
+ source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
+ } else if (util.isFormDataLike(object)) {
+ const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
+ const prefix = `--${boundary}\r\nContent-Disposition: form-data`
+
+ /*! formdata-polyfill. MIT License. Jimmy Wärting */
+ const escape = (str) =>
+ str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
+ const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
+
+ // Set action to this step: run the multipart/form-data
+ // encoding algorithm, with object’s entry list and UTF-8.
+    // - This ensures that the body is immutable and can't be changed afterwards,
+    // - that the content-length is calculated in advance,
+    // - and that all parts are pre-encoded and ready to be sent.
+
+ const blobParts = []
+ const rn = new Uint8Array([13, 10]) // '\r\n'
+ length = 0
+ let hasUnknownSizeValue = false
+
+ for (const [name, value] of object) {
+ if (typeof value === 'string') {
+ const chunk = textEncoder.encode(prefix +
+ `; name="${escape(normalizeLinefeeds(name))}"` +
+ `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
+ blobParts.push(chunk)
+ length += chunk.byteLength
+ } else {
+ const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
+ (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
+ `Content-Type: ${
+ value.type || 'application/octet-stream'
+ }\r\n\r\n`)
+ blobParts.push(chunk, value, rn)
+ if (typeof value.size === 'number') {
+ length += chunk.byteLength + value.size + rn.byteLength
+ } else {
+ hasUnknownSizeValue = true
+ }
+ }
+ }
+
+ const chunk = textEncoder.encode(`--${boundary}--`)
+ blobParts.push(chunk)
+ length += chunk.byteLength
+ if (hasUnknownSizeValue) {
+ length = null
+ }
+
+ // Set source to object.
+ source = object
+
+ action = async function * () {
+ for (const part of blobParts) {
+ if (part.stream) {
+ yield * part.stream()
+ } else {
+ yield part
+ }
+ }
+ }
+
+ // Set type to `multipart/form-data; boundary=`,
+ // followed by the multipart/form-data boundary string generated
+ // by the multipart/form-data encoding algorithm.
+ type = 'multipart/form-data; boundary=' + boundary
+ } else if (isBlobLike(object)) {
+ // Blob
+
+ // Set source to object.
+ source = object
+
+ // Set length to object’s size.
+ length = object.size
+
+ // If object’s type attribute is not the empty byte sequence, set
+ // type to its value.
+ if (object.type) {
+ type = object.type
+ }
+ } else if (typeof object[Symbol.asyncIterator] === 'function') {
+ // If keepalive is true, then throw a TypeError.
+ if (keepalive) {
+ throw new TypeError('keepalive')
+ }
+
+ // If object is disturbed or locked, then throw a TypeError.
+ if (util.isDisturbed(object) || object.locked) {
+ throw new TypeError(
+ 'Response body object should not be disturbed or locked'
+ )
+ }
+
+ stream =
+ object instanceof ReadableStream ? object : ReadableStreamFrom(object)
+ }
+
+ // 11. If source is a byte sequence, then set action to a
+ // step that returns source and length to source’s length.
+ if (typeof source === 'string' || util.isBuffer(source)) {
+ length = Buffer.byteLength(source)
+ }
+
+  // 12. If action is non-null, then run these steps in parallel:
+ if (action != null) {
+ // Run action.
+ let iterator
+ stream = new ReadableStream({
+ async start () {
+ iterator = action(object)[Symbol.asyncIterator]()
+ },
+ async pull (controller) {
+ const { value, done } = await iterator.next()
+ if (done) {
+ // When running action is done, close stream.
+ queueMicrotask(() => {
+ controller.close()
+ })
+ } else {
+ // Whenever one or more bytes are available and stream is not errored,
+ // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
+ // bytes into stream.
+ if (!isErrored(stream)) {
+ controller.enqueue(new Uint8Array(value))
+ }
+ }
+ return controller.desiredSize > 0
+ },
+ async cancel (reason) {
+ await iterator.return()
+ },
+ type: undefined
+ })
+ }
+
+ // 13. Let body be a body whose stream is stream, source is source,
+ // and length is length.
+ const body = { stream, source, length }
+
+ // 14. Return (body, type).
+ return [body, type]
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
+function safelyExtractBody (object, keepalive = false) {
+ if (!ReadableStream) {
+ // istanbul ignore next
+ ReadableStream = (__nccwpck_require__(5356).ReadableStream)
+ }
+
+ // To safely extract a body and a `Content-Type` value from
+ // a byte sequence or BodyInit object object, run these steps:
+
+ // 1. If object is a ReadableStream object, then:
+ if (object instanceof ReadableStream) {
+ // Assert: object is neither disturbed nor locked.
+ // istanbul ignore next
+ assert(!util.isDisturbed(object), 'The body has already been consumed.')
+ // istanbul ignore next
+ assert(!object.locked, 'The stream is locked.')
+ }
+
+ // 2. Return the results of extracting object.
+ return extractBody(object, keepalive)
+}
+
+function cloneBody (body) {
+ // To clone a body body, run these steps:
+
+ // https://fetch.spec.whatwg.org/#concept-body-clone
+
+ // 1. Let « out1, out2 » be the result of teeing body’s stream.
+ const [out1, out2] = body.stream.tee()
+ const out2Clone = structuredClone(out2, { transfer: [out2] })
+  // This, for whatever reason, unrefs out2Clone, which allows
+  // the process to exit by itself.
+ const [, finalClone] = out2Clone.tee()
+
+ // 2. Set body’s stream to out1.
+ body.stream = out1
+
+ // 3. Return a body whose stream is out2 and other members are copied from body.
+ return {
+ stream: finalClone,
+ length: body.length,
+ source: body.source
+ }
+}
+
+async function * consumeBody (body) {
+ if (body) {
+ if (isUint8Array(body)) {
+ yield body
+ } else {
+ const stream = body.stream
+
+ if (util.isDisturbed(stream)) {
+ throw new TypeError('The body has already been consumed.')
+ }
+
+ if (stream.locked) {
+ throw new TypeError('The stream is locked.')
+ }
+
+ // Compat.
+ stream[kBodyUsed] = true
+
+ yield * stream
+ }
+ }
+}
+
+function throwIfAborted (state) {
+ if (state.aborted) {
+ throw new DOMException('The operation was aborted.', 'AbortError')
+ }
+}
+
+function bodyMixinMethods (instance) {
+ const methods = {
+ blob () {
+ // The blob() method steps are to return the result of
+ // running consume body with this and the following step
+ // given a byte sequence bytes: return a Blob whose
+ // contents are bytes and whose type attribute is this’s
+ // MIME type.
+ return specConsumeBody(this, (bytes) => {
+ let mimeType = bodyMimeType(this)
+
+ if (mimeType === 'failure') {
+ mimeType = ''
+ } else if (mimeType) {
+ mimeType = serializeAMimeType(mimeType)
+ }
+
+ // Return a Blob whose contents are bytes and type attribute
+ // is mimeType.
+ return new Blob([bytes], { type: mimeType })
+ }, instance)
+ },
+
+ arrayBuffer () {
+ // The arrayBuffer() method steps are to return the result
+ // of running consume body with this and the following step
+ // given a byte sequence bytes: return a new ArrayBuffer
+ // whose contents are bytes.
+ return specConsumeBody(this, (bytes) => {
+ return new Uint8Array(bytes).buffer
+ }, instance)
+ },
+
+ text () {
+ // The text() method steps are to return the result of running
+ // consume body with this and UTF-8 decode.
+ return specConsumeBody(this, utf8DecodeBytes, instance)
+ },
+
+ json () {
+ // The json() method steps are to return the result of running
+ // consume body with this and parse JSON from bytes.
+ return specConsumeBody(this, parseJSONFromBytes, instance)
+ },
+
+ async formData () {
+ webidl.brandCheck(this, instance)
+
+ throwIfAborted(this[kState])
+
+ const contentType = this.headers.get('Content-Type')
+
+ // If mimeType’s essence is "multipart/form-data", then:
+ if (/multipart\/form-data/.test(contentType)) {
+ const headers = {}
+ for (const [key, value] of this.headers) headers[key.toLowerCase()] = value
+
+ const responseFormData = new FormData()
+
+ let busboy
+
+ try {
+ busboy = new Busboy({
+ headers,
+ preservePath: true
+ })
+ } catch (err) {
+ throw new DOMException(`${err}`, 'AbortError')
+ }
+
+ busboy.on('field', (name, value) => {
+ responseFormData.append(name, value)
+ })
+ busboy.on('file', (name, value, filename, encoding, mimeType) => {
+ const chunks = []
+
+ if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
+ let base64chunk = ''
+
+ value.on('data', (chunk) => {
+ base64chunk += chunk.toString().replace(/[\r\n]/gm, '')
+
+ const end = base64chunk.length - base64chunk.length % 4
+ chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))
+
+ base64chunk = base64chunk.slice(end)
+ })
+ value.on('end', () => {
+ chunks.push(Buffer.from(base64chunk, 'base64'))
+ responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+ })
+ } else {
+ value.on('data', (chunk) => {
+ chunks.push(chunk)
+ })
+ value.on('end', () => {
+ responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+ })
+ }
+ })
+
+ const busboyResolve = new Promise((resolve, reject) => {
+ busboy.on('finish', resolve)
+ busboy.on('error', (err) => reject(new TypeError(err)))
+ })
+
+ if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
+ busboy.end()
+ await busboyResolve
+
+ return responseFormData
+ } else if (/application\/x-www-form-urlencoded/.test(contentType)) {
+ // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:
+
+ // 1. Let entries be the result of parsing bytes.
+ let entries
+ try {
+ let text = ''
+ // application/x-www-form-urlencoded parser will keep the BOM.
+ // https://url.spec.whatwg.org/#concept-urlencoded-parser
+ // Note that streaming decoder is stateful and cannot be reused
+ const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })
+
+ for await (const chunk of consumeBody(this[kState].body)) {
+ if (!isUint8Array(chunk)) {
+ throw new TypeError('Expected Uint8Array chunk')
+ }
+ text += streamingDecoder.decode(chunk, { stream: true })
+ }
+ text += streamingDecoder.decode()
+ entries = new URLSearchParams(text)
+ } catch (err) {
+ // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
+ // 2. If entries is failure, then throw a TypeError.
+ throw Object.assign(new TypeError(), { cause: err })
+ }
+
+ // 3. Return a new FormData object whose entries are entries.
+ const formData = new FormData()
+ for (const [name, value] of entries) {
+ formData.append(name, value)
+ }
+ return formData
+ } else {
+ // Wait a tick before checking if the request has been aborted.
+ // Otherwise, a TypeError can be thrown when an AbortError should.
+ await Promise.resolve()
+
+ throwIfAborted(this[kState])
+
+ // Otherwise, throw a TypeError.
+ throw webidl.errors.exception({
+ header: `${instance.name}.formData`,
+ message: 'Could not parse content as FormData.'
+ })
+ }
+ }
+ }
+
+ return methods
+}
+
+function mixinBody (prototype) {
+ Object.assign(prototype.prototype, bodyMixinMethods(prototype))
+}
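+
+// Illustrative usage sketch (not part of the upstream undici sources):
+// mixinBody() copies the body-reading methods above onto a constructor's
+// prototype; undici applies it to its Request and Response classes. The
+// `MyMessage` class below is hypothetical and would still need to provide
+// this[kState].body and this.headers for the methods to work.
+//
+//   class MyMessage { /* sets this[kState] and this.headers elsewhere */ }
+//   mixinBody(MyMessage)
+//   // MyMessage.prototype now has blob(), arrayBuffer(), text(), json(), formData()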
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
+ * @param {Response|Request} object
+ * @param {(value: unknown) => unknown} convertBytesToJSValue
+ * @param {Response|Request} instance
+ */
+async function specConsumeBody (object, convertBytesToJSValue, instance) {
+ webidl.brandCheck(object, instance)
+
+ throwIfAborted(object[kState])
+
+ // 1. If object is unusable, then return a promise rejected
+ // with a TypeError.
+ if (bodyUnusable(object[kState].body)) {
+ throw new TypeError('Body is unusable')
+ }
+
+ // 2. Let promise be a new promise.
+ const promise = createDeferredPromise()
+
+ // 3. Let errorSteps given error be to reject promise with error.
+ const errorSteps = (error) => promise.reject(error)
+
+ // 4. Let successSteps given a byte sequence data be to resolve
+ // promise with the result of running convertBytesToJSValue
+ // with data. If that threw an exception, then run errorSteps
+ // with that exception.
+ const successSteps = (data) => {
+ try {
+ promise.resolve(convertBytesToJSValue(data))
+ } catch (e) {
+ errorSteps(e)
+ }
+ }
+
+ // 5. If object’s body is null, then run successSteps with an
+ // empty byte sequence.
+ if (object[kState].body == null) {
+ successSteps(new Uint8Array())
+ return promise.promise
+ }
+
+ // 6. Otherwise, fully read object’s body given successSteps,
+ // errorSteps, and object’s relevant global object.
+ await fullyReadBody(object[kState].body, successSteps, errorSteps)
+
+ // 7. Return promise.
+ return promise.promise
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function bodyUnusable (body) {
+ // An object including the Body interface mixin is
+ // said to be unusable if its body is non-null and
+ // its body’s stream is disturbed or locked.
+ return body != null && (body.stream.locked || util.isDisturbed(body.stream))
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#utf-8-decode
+ * @param {Buffer} buffer
+ */
+function utf8DecodeBytes (buffer) {
+ if (buffer.length === 0) {
+ return ''
+ }
+
+ // 1. Let buffer be the result of peeking three bytes from
+ // ioQueue, converted to a byte sequence.
+
+ // 2. If buffer is 0xEF 0xBB 0xBF, then read three
+ // bytes from ioQueue. (Do nothing with those bytes.)
+ if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
+ buffer = buffer.subarray(3)
+ }
+
+ // 3. Process a queue with an instance of UTF-8’s
+ // decoder, ioQueue, output, and "replacement".
+ const output = textDecoder.decode(buffer)
+
+ // 4. Return output.
+ return output
+}
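+
+// Illustrative usage sketch (not part of the upstream undici sources):
+// utf8DecodeBytes() strips a leading UTF-8 BOM before decoding.
+//
+//   utf8DecodeBytes(Buffer.from([0xEF, 0xBB, 0xBF, 0x68, 0x69])) // 'hi'
+//   utf8DecodeBytes(Buffer.from('hi'))                           // 'hi'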
+
+/**
+ * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
+ * @param {Uint8Array} bytes
+ */
+function parseJSONFromBytes (bytes) {
+ return JSON.parse(utf8DecodeBytes(bytes))
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
+ * @param {import('./response').Response|import('./request').Request} object
+ */
+function bodyMimeType (object) {
+ const { headersList } = object[kState]
+ const contentType = headersList.get('content-type')
+
+ if (contentType === null) {
+ return 'failure'
+ }
+
+ return parseMIMEType(contentType)
+}
+
+module.exports = {
+ extractBody,
+ safelyExtractBody,
+ cloneBody,
+ mixinBody
+}
+
+
+/***/ }),
+
+/***/ 1037:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267)
+
+const corsSafeListedMethods = ['GET', 'HEAD', 'POST']
+const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)
+
+const nullBodyStatus = [101, 204, 205, 304]
+
+const redirectStatus = [301, 302, 303, 307, 308]
+const redirectStatusSet = new Set(redirectStatus)
+
+// https://fetch.spec.whatwg.org/#block-bad-port
+const badPorts = [
+ '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
+ '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
+ '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
+ '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
+ '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',
+ '10080'
+]
+
+const badPortsSet = new Set(badPorts)
+
+// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
+const referrerPolicy = [
+ '',
+ 'no-referrer',
+ 'no-referrer-when-downgrade',
+ 'same-origin',
+ 'origin',
+ 'strict-origin',
+ 'origin-when-cross-origin',
+ 'strict-origin-when-cross-origin',
+ 'unsafe-url'
+]
+const referrerPolicySet = new Set(referrerPolicy)
+
+const requestRedirect = ['follow', 'manual', 'error']
+
+const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+const safeMethodsSet = new Set(safeMethods)
+
+const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']
+
+const requestCredentials = ['omit', 'same-origin', 'include']
+
+const requestCache = [
+ 'default',
+ 'no-store',
+ 'reload',
+ 'no-cache',
+ 'force-cache',
+ 'only-if-cached'
+]
+
+// https://fetch.spec.whatwg.org/#request-body-header-name
+const requestBodyHeader = [
+ 'content-encoding',
+ 'content-language',
+ 'content-location',
+ 'content-type',
+ // See https://github.com/nodejs/undici/issues/2021
+ // 'Content-Length' is a forbidden header name, which is typically
+ // removed in the Headers implementation. However, undici doesn't
+ // filter out headers, so we add it here.
+ 'content-length'
+]
+
+// https://fetch.spec.whatwg.org/#enumdef-requestduplex
+const requestDuplex = [
+ 'half'
+]
+
+// http://fetch.spec.whatwg.org/#forbidden-method
+const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']
+const forbiddenMethodsSet = new Set(forbiddenMethods)
+
+const subresource = [
+ 'audio',
+ 'audioworklet',
+ 'font',
+ 'image',
+ 'manifest',
+ 'paintworklet',
+ 'script',
+ 'style',
+ 'track',
+ 'video',
+ 'xslt',
+ ''
+]
+const subresourceSet = new Set(subresource)
+
+/** @type {globalThis['DOMException']} */
+const DOMException = globalThis.DOMException ?? (() => {
+ // DOMException was only made a global in Node v17.0.0,
+ // but fetch supports >= v16.8.
+ try {
+ atob('~')
+ } catch (err) {
+ return Object.getPrototypeOf(err).constructor
+ }
+})()
+
+let channel
+
+/** @type {globalThis['structuredClone']} */
+const structuredClone =
+ globalThis.structuredClone ??
+ // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
+ // structuredClone was added in v17.0.0, but fetch supports v16.8
+ function structuredClone (value, options = undefined) {
+ if (arguments.length === 0) {
+ throw new TypeError('missing argument')
+ }
+
+ if (!channel) {
+ channel = new MessageChannel()
+ }
+ channel.port1.unref()
+ channel.port2.unref()
+ channel.port1.postMessage(value, options?.transfer)
+ return receiveMessageOnPort(channel.port2).message
+ }
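+
+// Illustrative usage sketch (not part of the upstream undici sources): the
+// fallback above round-trips the value through a MessageChannel, which runs the
+// structured clone algorithm; on Node >= 17 the native global is used instead.
+//
+//   const copy = structuredClone({ nested: new Map([['a', 1]]) })
+//   copy.nested.get('a') // 1, and copy.nested is a distinct Map instance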
+
+module.exports = {
+ DOMException,
+ structuredClone,
+ subresource,
+ forbiddenMethods,
+ requestBodyHeader,
+ referrerPolicy,
+ requestRedirect,
+ requestMode,
+ requestCredentials,
+ requestCache,
+ redirectStatus,
+ corsSafeListedMethods,
+ nullBodyStatus,
+ safeMethods,
+ badPorts,
+ requestDuplex,
+ subresourceSet,
+ badPortsSet,
+ redirectStatusSet,
+ corsSafeListedMethodsSet,
+ safeMethodsSet,
+ forbiddenMethodsSet,
+ referrerPolicySet
+}
+
+
+/***/ }),
+
+/***/ 685:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(9491)
+const { atob } = __nccwpck_require__(4300)
+const { isomorphicDecode } = __nccwpck_require__(2538)
+
+const encoder = new TextEncoder()
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
+ */
+const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
+const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
+ */
+const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line
+
+// https://fetch.spec.whatwg.org/#data-url-processor
+/** @param {URL} dataURL */
+function dataURLProcessor (dataURL) {
+ // 1. Assert: dataURL’s scheme is "data".
+ assert(dataURL.protocol === 'data:')
+
+ // 2. Let input be the result of running the URL
+ // serializer on dataURL with exclude fragment
+ // set to true.
+ let input = URLSerializer(dataURL, true)
+
+ // 3. Remove the leading "data:" string from input.
+ input = input.slice(5)
+
+ // 4. Let position point at the start of input.
+ const position = { position: 0 }
+
+ // 5. Let mimeType be the result of collecting a
+ // sequence of code points that are not equal
+ // to U+002C (,), given position.
+ let mimeType = collectASequenceOfCodePointsFast(
+ ',',
+ input,
+ position
+ )
+
+ // 6. Strip leading and trailing ASCII whitespace
+ // from mimeType.
+ // Undici implementation note: we need to store the
+ // length because if the mimetype has spaces removed,
+ // the wrong amount will be sliced from the input in
+ // step #9
+ const mimeTypeLength = mimeType.length
+ mimeType = removeASCIIWhitespace(mimeType, true, true)
+
+ // 7. If position is past the end of input, then
+ // return failure
+ if (position.position >= input.length) {
+ return 'failure'
+ }
+
+ // 8. Advance position by 1.
+ position.position++
+
+ // 9. Let encodedBody be the remainder of input.
+ const encodedBody = input.slice(mimeTypeLength + 1)
+
+ // 10. Let body be the percent-decoding of encodedBody.
+ let body = stringPercentDecode(encodedBody)
+
+ // 11. If mimeType ends with U+003B (;), followed by
+ // zero or more U+0020 SPACE, followed by an ASCII
+ // case-insensitive match for "base64", then:
+ if (/;(\u0020){0,}base64$/i.test(mimeType)) {
+ // 1. Let stringBody be the isomorphic decode of body.
+ const stringBody = isomorphicDecode(body)
+
+ // 2. Set body to the forgiving-base64 decode of
+ // stringBody.
+ body = forgivingBase64(stringBody)
+
+ // 3. If body is failure, then return failure.
+ if (body === 'failure') {
+ return 'failure'
+ }
+
+ // 4. Remove the last 6 code points from mimeType.
+ mimeType = mimeType.slice(0, -6)
+
+ // 5. Remove trailing U+0020 SPACE code points from mimeType,
+ // if any.
+ mimeType = mimeType.replace(/(\u0020)+$/, '')
+
+ // 6. Remove the last U+003B (;) code point from mimeType.
+ mimeType = mimeType.slice(0, -1)
+ }
+
+ // 12. If mimeType starts with U+003B (;), then prepend
+ // "text/plain" to mimeType.
+ if (mimeType.startsWith(';')) {
+ mimeType = 'text/plain' + mimeType
+ }
+
+ // 13. Let mimeTypeRecord be the result of parsing
+ // mimeType.
+ let mimeTypeRecord = parseMIMEType(mimeType)
+
+ // 14. If mimeTypeRecord is failure, then set
+ // mimeTypeRecord to text/plain;charset=US-ASCII.
+ if (mimeTypeRecord === 'failure') {
+ mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')
+ }
+
+ // 15. Return a new data: URL struct whose MIME
+ // type is mimeTypeRecord and body is body.
+ // https://fetch.spec.whatwg.org/#data-url-struct
+ return { mimeType: mimeTypeRecord, body }
+}
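+
+// Illustrative usage sketch (not part of the upstream undici sources): what
+// dataURLProcessor() returns for a typical base64 data: URL.
+//
+//   const result = dataURLProcessor(new URL('data:text/plain;base64,aGVsbG8='))
+//   result.mimeType.essence             // 'text/plain'
+//   Buffer.from(result.body).toString() // 'hello'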
+
+// https://url.spec.whatwg.org/#concept-url-serializer
+/**
+ * @param {URL} url
+ * @param {boolean} excludeFragment
+ */
+function URLSerializer (url, excludeFragment = false) {
+ if (!excludeFragment) {
+ return url.href
+ }
+
+ const href = url.href
+ const hashLength = url.hash.length
+
+ return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
+}
+
+// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
+/**
+ * @param {(char: string) => boolean} condition
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePoints (condition, input, position) {
+ // 1. Let result be the empty string.
+ let result = ''
+
+ // 2. While position doesn’t point past the end of input and the
+ // code point at position within input meets the condition condition:
+ while (position.position < input.length && condition(input[position.position])) {
+ // 1. Append that code point to the end of result.
+ result += input[position.position]
+
+ // 2. Advance position by 1.
+ position.position++
+ }
+
+ // 3. Return result.
+ return result
+}
+
+/**
+ * A faster collectASequenceOfCodePoints that only works when comparing a single character.
+ * @param {string} char
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePointsFast (char, input, position) {
+ const idx = input.indexOf(char, position.position)
+ const start = position.position
+
+ if (idx === -1) {
+ position.position = input.length
+ return input.slice(start)
+ }
+
+ position.position = idx
+ return input.slice(start, position.position)
+}
+
+// https://url.spec.whatwg.org/#string-percent-decode
+/** @param {string} input */
+function stringPercentDecode (input) {
+ // 1. Let bytes be the UTF-8 encoding of input.
+ const bytes = encoder.encode(input)
+
+ // 2. Return the percent-decoding of bytes.
+ return percentDecode(bytes)
+}
+
+// https://url.spec.whatwg.org/#percent-decode
+/** @param {Uint8Array} input */
+function percentDecode (input) {
+ // 1. Let output be an empty byte sequence.
+ /** @type {number[]} */
+ const output = []
+
+ // 2. For each byte byte in input:
+ for (let i = 0; i < input.length; i++) {
+ const byte = input[i]
+
+ // 1. If byte is not 0x25 (%), then append byte to output.
+ if (byte !== 0x25) {
+ output.push(byte)
+
+ // 2. Otherwise, if byte is 0x25 (%) and the next two bytes
+ // after byte in input are not in the ranges
+ // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),
+ // and 0x61 (a) to 0x66 (f), all inclusive, append byte
+ // to output.
+ } else if (
+ byte === 0x25 &&
+ !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))
+ ) {
+ output.push(0x25)
+
+ // 3. Otherwise:
+ } else {
+ // 1. Let bytePoint be the two bytes after byte in input,
+ // decoded, and then interpreted as hexadecimal number.
+ const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])
+ const bytePoint = Number.parseInt(nextTwoBytes, 16)
+
+ // 2. Append a byte whose value is bytePoint to output.
+ output.push(bytePoint)
+
+ // 3. Skip the next two bytes in input.
+ i += 2
+ }
+ }
+
+ // 3. Return output.
+ return Uint8Array.from(output)
+}
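+
+// Illustrative usage sketch (not part of the upstream undici sources):
+// percent-decoding works on bytes and leaves malformed escapes untouched.
+//
+//   percentDecode(new TextEncoder().encode('a%41%zz')) // bytes for 'aA%zz'
+//   stringPercentDecode('%E2%9C%93')                   // the UTF-8 bytes of '✓'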
+
+// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
+/** @param {string} input */
+function parseMIMEType (input) {
+ // 1. Remove any leading and trailing HTTP whitespace
+ // from input.
+ input = removeHTTPWhitespace(input, true, true)
+
+ // 2. Let position be a position variable for input,
+ // initially pointing at the start of input.
+ const position = { position: 0 }
+
+ // 3. Let type be the result of collecting a sequence
+ // of code points that are not U+002F (/) from
+ // input, given position.
+ const type = collectASequenceOfCodePointsFast(
+ '/',
+ input,
+ position
+ )
+
+ // 4. If type is the empty string or does not solely
+ // contain HTTP token code points, then return failure.
+ // https://mimesniff.spec.whatwg.org/#http-token-code-point
+ if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
+ return 'failure'
+ }
+
+ // 5. If position is past the end of input, then return
+ // failure
+ if (position.position > input.length) {
+ return 'failure'
+ }
+
+ // 6. Advance position by 1. (This skips past U+002F (/).)
+ position.position++
+
+ // 7. Let subtype be the result of collecting a sequence of
+ // code points that are not U+003B (;) from input, given
+ // position.
+ let subtype = collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 8. Remove any trailing HTTP whitespace from subtype.
+ subtype = removeHTTPWhitespace(subtype, false, true)
+
+ // 9. If subtype is the empty string or does not solely
+ // contain HTTP token code points, then return failure.
+ if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
+ return 'failure'
+ }
+
+ const typeLowercase = type.toLowerCase()
+ const subtypeLowercase = subtype.toLowerCase()
+
+ // 10. Let mimeType be a new MIME type record whose type
+ // is type, in ASCII lowercase, and subtype is subtype,
+ // in ASCII lowercase.
+ // https://mimesniff.spec.whatwg.org/#mime-type
+ const mimeType = {
+ type: typeLowercase,
+ subtype: subtypeLowercase,
+    /** @type {Map<string, string>} */
+ parameters: new Map(),
+ // https://mimesniff.spec.whatwg.org/#mime-type-essence
+ essence: `${typeLowercase}/${subtypeLowercase}`
+ }
+
+ // 11. While position is not past the end of input:
+ while (position.position < input.length) {
+ // 1. Advance position by 1. (This skips past U+003B (;).)
+ position.position++
+
+ // 2. Collect a sequence of code points that are HTTP
+ // whitespace from input given position.
+ collectASequenceOfCodePoints(
+ // https://fetch.spec.whatwg.org/#http-whitespace
+ char => HTTP_WHITESPACE_REGEX.test(char),
+ input,
+ position
+ )
+
+ // 3. Let parameterName be the result of collecting a
+ // sequence of code points that are not U+003B (;)
+ // or U+003D (=) from input, given position.
+ let parameterName = collectASequenceOfCodePoints(
+ (char) => char !== ';' && char !== '=',
+ input,
+ position
+ )
+
+ // 4. Set parameterName to parameterName, in ASCII
+ // lowercase.
+ parameterName = parameterName.toLowerCase()
+
+ // 5. If position is not past the end of input, then:
+ if (position.position < input.length) {
+ // 1. If the code point at position within input is
+ // U+003B (;), then continue.
+ if (input[position.position] === ';') {
+ continue
+ }
+
+ // 2. Advance position by 1. (This skips past U+003D (=).)
+ position.position++
+ }
+
+ // 6. If position is past the end of input, then break.
+ if (position.position > input.length) {
+ break
+ }
+
+ // 7. Let parameterValue be null.
+ let parameterValue = null
+
+ // 8. If the code point at position within input is
+ // U+0022 ("), then:
+ if (input[position.position] === '"') {
+ // 1. Set parameterValue to the result of collecting
+ // an HTTP quoted string from input, given position
+ // and the extract-value flag.
+ parameterValue = collectAnHTTPQuotedString(input, position, true)
+
+ // 2. Collect a sequence of code points that are not
+ // U+003B (;) from input, given position.
+ collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 9. Otherwise:
+ } else {
+ // 1. Set parameterValue to the result of collecting
+ // a sequence of code points that are not U+003B (;)
+ // from input, given position.
+ parameterValue = collectASequenceOfCodePointsFast(
+ ';',
+ input,
+ position
+ )
+
+ // 2. Remove any trailing HTTP whitespace from parameterValue.
+ parameterValue = removeHTTPWhitespace(parameterValue, false, true)
+
+ // 3. If parameterValue is the empty string, then continue.
+ if (parameterValue.length === 0) {
+ continue
+ }
+ }
+
+ // 10. If all of the following are true
+ // - parameterName is not the empty string
+ // - parameterName solely contains HTTP token code points
+ // - parameterValue solely contains HTTP quoted-string token code points
+ // - mimeType’s parameters[parameterName] does not exist
+ // then set mimeType’s parameters[parameterName] to parameterValue.
+ if (
+ parameterName.length !== 0 &&
+ HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
+ (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
+ !mimeType.parameters.has(parameterName)
+ ) {
+ mimeType.parameters.set(parameterName, parameterValue)
+ }
+ }
+
+ // 12. Return mimeType.
+ return mimeType
+}
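+
+// Illustrative usage sketch (not part of the upstream undici sources): a parsed
+// MIME type record. Type, subtype and parameter names are lowercased; parameter
+// values keep their case.
+//
+//   const mt = parseMIMEType('Text/HTML; Charset="UTF-8"')
+//   mt.essence                   // 'text/html'
+//   mt.parameters.get('charset') // 'UTF-8'
+//   parseMIMEType('no-slash')    // 'failure'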
+
+// https://infra.spec.whatwg.org/#forgiving-base64-decode
+/** @param {string} data */
+function forgivingBase64 (data) {
+ // 1. Remove all ASCII whitespace from data.
+ data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line
+
+ // 2. If data’s code point length divides by 4 leaving
+ // no remainder, then:
+ if (data.length % 4 === 0) {
+ // 1. If data ends with one or two U+003D (=) code points,
+ // then remove them from data.
+ data = data.replace(/=?=$/, '')
+ }
+
+ // 3. If data’s code point length divides by 4 leaving
+ // a remainder of 1, then return failure.
+ if (data.length % 4 === 1) {
+ return 'failure'
+ }
+
+ // 4. If data contains a code point that is not one of
+ // U+002B (+)
+ // U+002F (/)
+ // ASCII alphanumeric
+ // then return failure.
+ if (/[^+/0-9A-Za-z]/.test(data)) {
+ return 'failure'
+ }
+
+ const binary = atob(data)
+ const bytes = new Uint8Array(binary.length)
+
+ for (let byte = 0; byte < binary.length; byte++) {
+ bytes[byte] = binary.charCodeAt(byte)
+ }
+
+ return bytes
+}
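+
+// Illustrative usage sketch (not part of the upstream undici sources): forgiving
+// base64 ignores ASCII whitespace and optional '=' padding, and reports invalid
+// input as 'failure' rather than throwing.
+//
+//   Buffer.from(forgivingBase64('a GVsbG8 =')).toString() // 'hello'
+//   forgivingBase64('abc!')                               // 'failure'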
+
+// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
+// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
+/**
+ * @param {string} input
+ * @param {{ position: number }} position
+ * @param {boolean?} extractValue
+ */
+function collectAnHTTPQuotedString (input, position, extractValue) {
+ // 1. Let positionStart be position.
+ const positionStart = position.position
+
+ // 2. Let value be the empty string.
+ let value = ''
+
+ // 3. Assert: the code point at position within input
+ // is U+0022 (").
+ assert(input[position.position] === '"')
+
+ // 4. Advance position by 1.
+ position.position++
+
+ // 5. While true:
+ while (true) {
+ // 1. Append the result of collecting a sequence of code points
+ // that are not U+0022 (") or U+005C (\) from input, given
+ // position, to value.
+ value += collectASequenceOfCodePoints(
+ (char) => char !== '"' && char !== '\\',
+ input,
+ position
+ )
+
+ // 2. If position is past the end of input, then break.
+ if (position.position >= input.length) {
+ break
+ }
+
+ // 3. Let quoteOrBackslash be the code point at position within
+ // input.
+ const quoteOrBackslash = input[position.position]
+
+ // 4. Advance position by 1.
+ position.position++
+
+ // 5. If quoteOrBackslash is U+005C (\), then:
+ if (quoteOrBackslash === '\\') {
+ // 1. If position is past the end of input, then append
+ // U+005C (\) to value and break.
+ if (position.position >= input.length) {
+ value += '\\'
+ break
+ }
+
+ // 2. Append the code point at position within input to value.
+ value += input[position.position]
+
+ // 3. Advance position by 1.
+ position.position++
+
+ // 6. Otherwise:
+ } else {
+ // 1. Assert: quoteOrBackslash is U+0022 (").
+ assert(quoteOrBackslash === '"')
+
+ // 2. Break.
+ break
+ }
+ }
+
+ // 6. If the extract-value flag is set, then return value.
+ if (extractValue) {
+ return value
+ }
+
+ // 7. Return the code points from positionStart to position,
+ // inclusive, within input.
+ return input.slice(positionStart, position.position)
+}
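+
+// Illustrative usage sketch (not part of the upstream undici sources), mirroring
+// the examples linked above: with the extract-value flag the surrounding quotes
+// are dropped, otherwise the raw quoted span is returned.
+//
+//   collectAnHTTPQuotedString('"hello" world', { position: 0 }, true) // 'hello'
+//   collectAnHTTPQuotedString('"hello" world', { position: 0 })       // '"hello"'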
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
+ */
+function serializeAMimeType (mimeType) {
+ assert(mimeType !== 'failure')
+ const { parameters, essence } = mimeType
+
+ // 1. Let serialization be the concatenation of mimeType’s
+ // type, U+002F (/), and mimeType’s subtype.
+ let serialization = essence
+
+ // 2. For each name → value of mimeType’s parameters:
+ for (let [name, value] of parameters.entries()) {
+ // 1. Append U+003B (;) to serialization.
+ serialization += ';'
+
+ // 2. Append name to serialization.
+ serialization += name
+
+ // 3. Append U+003D (=) to serialization.
+ serialization += '='
+
+ // 4. If value does not solely contain HTTP token code
+ // points or value is the empty string, then:
+ if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
+      // 1. Precede each occurrence of U+0022 (") or
+ // U+005C (\) in value with U+005C (\).
+ value = value.replace(/(\\|")/g, '\\$1')
+
+ // 2. Prepend U+0022 (") to value.
+ value = '"' + value
+
+ // 3. Append U+0022 (") to value.
+ value += '"'
+ }
+
+ // 5. Append value to serialization.
+ serialization += value
+ }
+
+ // 3. Return serialization.
+ return serialization
+}
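+
+// Illustrative usage sketch (not part of the upstream undici sources): parameter
+// values that are not pure HTTP token code points are re-quoted on serialization.
+//
+//   serializeAMimeType(parseMIMEType('text/html;charset=utf-8')) // 'text/html;charset=utf-8'
+//   serializeAMimeType(parseMIMEType('text/html;title="a b"'))   // 'text/html;title="a b"'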
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} char
+ */
+function isHTTPWhiteSpace (char) {
+ return char === '\r' || char === '\n' || char === '\t' || char === ' '
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} str
+ */
+function removeHTTPWhitespace (str, leading = true, trailing = true) {
+ let lead = 0
+ let trail = str.length - 1
+
+ if (leading) {
+ for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);
+ }
+
+ if (trailing) {
+ for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);
+ }
+
+ return str.slice(lead, trail + 1)
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace
+ * @param {string} char
+ */
+function isASCIIWhitespace (char) {
+ return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' '
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
+ */
+function removeASCIIWhitespace (str, leading = true, trailing = true) {
+ let lead = 0
+ let trail = str.length - 1
+
+ if (leading) {
+ for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);
+ }
+
+ if (trailing) {
+ for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);
+ }
+
+ return str.slice(lead, trail + 1)
+}
+
+module.exports = {
+ dataURLProcessor,
+ URLSerializer,
+ collectASequenceOfCodePoints,
+ collectASequenceOfCodePointsFast,
+ stringPercentDecode,
+ parseMIMEType,
+ collectAnHTTPQuotedString,
+ serializeAMimeType
+}
+
+
+/***/ }),
+
+/***/ 8511:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { Blob, File: NativeFile } = __nccwpck_require__(4300)
+const { types } = __nccwpck_require__(3837)
+const { kState } = __nccwpck_require__(5861)
+const { isBlobLike } = __nccwpck_require__(2538)
+const { webidl } = __nccwpck_require__(1744)
+const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)
+const { kEnumerableProperty } = __nccwpck_require__(3983)
+const encoder = new TextEncoder()
+
+class File extends Blob {
+ constructor (fileBits, fileName, options = {}) {
+ // The File constructor is invoked with two or three parameters, depending
+ // on whether the optional dictionary parameter is used. When the File()
+ // constructor is invoked, user agents must run the following steps:
+ webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })
+
+    fileBits = webidl.converters['sequence<BlobPart>'](fileBits)
+ fileName = webidl.converters.USVString(fileName)
+ options = webidl.converters.FilePropertyBag(options)
+
+ // 1. Let bytes be the result of processing blob parts given fileBits and
+ // options.
+ // Note: Blob handles this for us
+
+ // 2. Let n be the fileName argument to the constructor.
+ const n = fileName
+
+ // 3. Process FilePropertyBag dictionary argument by running the following
+ // substeps:
+
+ // 1. If the type member is provided and is not the empty string, let t
+ // be set to the type dictionary member. If t contains any characters
+ // outside the range U+0020 to U+007E, then set t to the empty string
+ // and return from these substeps.
+ // 2. Convert every character in t to ASCII lowercase.
+ let t = options.type
+ let d
+
+ // eslint-disable-next-line no-labels
+ substep: {
+ if (t) {
+ t = parseMIMEType(t)
+
+ if (t === 'failure') {
+ t = ''
+ // eslint-disable-next-line no-labels
+ break substep
+ }
+
+ t = serializeAMimeType(t).toLowerCase()
+ }
+
+ // 3. If the lastModified member is provided, let d be set to the
+ // lastModified dictionary member. If it is not provided, set d to the
+ // current date and time represented as the number of milliseconds since
+ // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+ d = options.lastModified
+ }
+
+ // 4. Return a new File object F such that:
+ // F refers to the bytes byte sequence.
+ // F.size is set to the number of total bytes in bytes.
+ // F.name is set to n.
+ // F.type is set to t.
+ // F.lastModified is set to d.
+
+ super(processBlobParts(fileBits, options), { type: t })
+ this[kState] = {
+ name: n,
+ lastModified: d,
+ type: t
+ }
+ }
+
+ get name () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].name
+ }
+
+ get lastModified () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].lastModified
+ }
+
+ get type () {
+ webidl.brandCheck(this, File)
+
+ return this[kState].type
+ }
+}
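+
+// Illustrative usage sketch (not part of the upstream undici sources):
+// constructing this File polyfill. An unparseable `type` option falls back to
+// the empty string instead of throwing.
+//
+//   const f = new File(['hello'], 'hello.txt', { type: 'text/plain' })
+//   f.name // 'hello.txt'
+//   f.type // 'text/plain'
+//   f.size // 5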
+
+class FileLike {
+ constructor (blobLike, fileName, options = {}) {
+ // TODO: argument idl type check
+
+ // The File constructor is invoked with two or three parameters, depending
+ // on whether the optional dictionary parameter is used. When the File()
+ // constructor is invoked, user agents must run the following steps:
+
+ // 1. Let bytes be the result of processing blob parts given fileBits and
+ // options.
+
+ // 2. Let n be the fileName argument to the constructor.
+ const n = fileName
+
+ // 3. Process FilePropertyBag dictionary argument by running the following
+ // substeps:
+
+ // 1. If the type member is provided and is not the empty string, let t
+ // be set to the type dictionary member. If t contains any characters
+ // outside the range U+0020 to U+007E, then set t to the empty string
+ // and return from these substeps.
+ // TODO
+ const t = options.type
+
+ // 2. Convert every character in t to ASCII lowercase.
+ // TODO
+
+ // 3. If the lastModified member is provided, let d be set to the
+ // lastModified dictionary member. If it is not provided, set d to the
+ // current date and time represented as the number of milliseconds since
+ // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+ const d = options.lastModified ?? Date.now()
+
+ // 4. Return a new File object F such that:
+ // F refers to the bytes byte sequence.
+ // F.size is set to the number of total bytes in bytes.
+ // F.name is set to n.
+ // F.type is set to t.
+ // F.lastModified is set to d.
+
+ this[kState] = {
+ blobLike,
+ name: n,
+ type: t,
+ lastModified: d
+ }
+ }
+
+ stream (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.stream(...args)
+ }
+
+ arrayBuffer (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.arrayBuffer(...args)
+ }
+
+ slice (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.slice(...args)
+ }
+
+ text (...args) {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.text(...args)
+ }
+
+ get size () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.size
+ }
+
+ get type () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].blobLike.type
+ }
+
+ get name () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].name
+ }
+
+ get lastModified () {
+ webidl.brandCheck(this, FileLike)
+
+ return this[kState].lastModified
+ }
+
+ get [Symbol.toStringTag] () {
+ return 'File'
+ }
+}
+
+Object.defineProperties(File.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'File',
+ configurable: true
+ },
+ name: kEnumerableProperty,
+ lastModified: kEnumerableProperty
+})
+
+webidl.converters.Blob = webidl.interfaceConverter(Blob)
+
+webidl.converters.BlobPart = function (V, opts) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (
+ ArrayBuffer.isView(V) ||
+ types.isAnyArrayBuffer(V)
+ ) {
+ return webidl.converters.BufferSource(V, opts)
+ }
+ }
+
+ return webidl.converters.USVString(V, opts)
+}
+
+webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter(
+ webidl.converters.BlobPart
+)
+
+// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
+webidl.converters.FilePropertyBag = webidl.dictionaryConverter([
+ {
+ key: 'lastModified',
+ converter: webidl.converters['long long'],
+ get defaultValue () {
+ return Date.now()
+ }
+ },
+ {
+ key: 'type',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'endings',
+ converter: (value) => {
+ value = webidl.converters.DOMString(value)
+ value = value.toLowerCase()
+
+ if (value !== 'native') {
+ value = 'transparent'
+ }
+
+ return value
+ },
+ defaultValue: 'transparent'
+ }
+])
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
+ * @param {(NodeJS.TypedArray|Blob|string)[]} parts
+ * @param {{ type: string, endings: string }} options
+ */
+function processBlobParts (parts, options) {
+ // 1. Let bytes be an empty sequence of bytes.
+ /** @type {NodeJS.TypedArray[]} */
+ const bytes = []
+
+ // 2. For each element in parts:
+ for (const element of parts) {
+ // 1. If element is a USVString, run the following substeps:
+ if (typeof element === 'string') {
+ // 1. Let s be element.
+ let s = element
+
+ // 2. If the endings member of options is "native", set s
+ // to the result of converting line endings to native
+ // of element.
+ if (options.endings === 'native') {
+ s = convertLineEndingsNative(s)
+ }
+
+ // 3. Append the result of UTF-8 encoding s to bytes.
+ bytes.push(encoder.encode(s))
+ } else if (
+ types.isAnyArrayBuffer(element) ||
+ types.isTypedArray(element)
+ ) {
+ // 2. If element is a BufferSource, get a copy of the
+ // bytes held by the buffer source, and append those
+ // bytes to bytes.
+ if (!element.buffer) { // ArrayBuffer
+ bytes.push(new Uint8Array(element))
+ } else {
+ bytes.push(
+ new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
+ )
+ }
+ } else if (isBlobLike(element)) {
+ // 3. If element is a Blob, append the bytes it represents
+ // to bytes.
+ bytes.push(element)
+ }
+ }
+
+ // 3. Return bytes.
+ return bytes
+}
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
+ * @param {string} s
+ */
+function convertLineEndingsNative (s) {
+  // 1. Let native line ending be the code point U+000A LF.
+ let nativeLineEnding = '\n'
+
+ // 2. If the underlying platform’s conventions are to
+ // represent newlines as a carriage return and line feed
+ // sequence, set native line ending to the code point
+ // U+000D CR followed by the code point U+000A LF.
+ if (process.platform === 'win32') {
+ nativeLineEnding = '\r\n'
+ }
+
+ return s.replace(/\r?\n/g, nativeLineEnding)
+}
+
+// If this function is moved to ./util.js, some tools (such as
+// rollup) will warn about circular dependencies. See:
+// https://github.com/nodejs/undici/issues/1629
+function isFileLike (object) {
+ return (
+ (NativeFile && object instanceof NativeFile) ||
+ object instanceof File || (
+ object &&
+ (typeof object.stream === 'function' ||
+ typeof object.arrayBuffer === 'function') &&
+ object[Symbol.toStringTag] === 'File'
+ )
+ )
+}
+
+module.exports = { File, FileLike, isFileLike }
+
+
+/***/ }),
+
+/***/ 2015:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2538)
+const { kState } = __nccwpck_require__(5861)
+const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(8511)
+const { webidl } = __nccwpck_require__(1744)
+const { Blob, File: NativeFile } = __nccwpck_require__(4300)
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+
+// https://xhr.spec.whatwg.org/#formdata
+class FormData {
+ constructor (form) {
+ if (form !== undefined) {
+ throw webidl.errors.conversionFailed({
+ prefix: 'FormData constructor',
+ argument: 'Argument 1',
+ types: ['undefined']
+ })
+ }
+
+ this[kState] = []
+ }
+
+ append (name, value, filename = undefined) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })
+
+ if (arguments.length === 3 && !isBlobLike(value)) {
+ throw new TypeError(
+ "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'"
+ )
+ }
+
+ // 1. Let value be value if given; otherwise blobValue.
+
+ name = webidl.converters.USVString(name)
+ value = isBlobLike(value)
+ ? webidl.converters.Blob(value, { strict: false })
+ : webidl.converters.USVString(value)
+ filename = arguments.length === 3
+ ? webidl.converters.USVString(filename)
+ : undefined
+
+ // 2. Let entry be the result of creating an entry with
+ // name, value, and filename if given.
+ const entry = makeEntry(name, value, filename)
+
+ // 3. Append entry to this’s entry list.
+ this[kState].push(entry)
+ }
+
+ delete (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })
+
+ name = webidl.converters.USVString(name)
+
+ // The delete(name) method steps are to remove all entries whose name
+ // is name from this’s entry list.
+ this[kState] = this[kState].filter(entry => entry.name !== name)
+ }
+
+ get (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })
+
+ name = webidl.converters.USVString(name)
+
+ // 1. If there is no entry whose name is name in this’s entry list,
+ // then return null.
+ const idx = this[kState].findIndex((entry) => entry.name === name)
+ if (idx === -1) {
+ return null
+ }
+
+ // 2. Return the value of the first entry whose name is name from
+ // this’s entry list.
+ return this[kState][idx].value
+ }
+
+ getAll (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })
+
+ name = webidl.converters.USVString(name)
+
+ // 1. If there is no entry whose name is name in this’s entry list,
+ // then return the empty list.
+ // 2. Return the values of all entries whose name is name, in order,
+ // from this’s entry list.
+ return this[kState]
+ .filter((entry) => entry.name === name)
+ .map((entry) => entry.value)
+ }
+
+ has (name) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })
+
+ name = webidl.converters.USVString(name)
+
+ // The has(name) method steps are to return true if there is an entry
+ // whose name is name in this’s entry list; otherwise false.
+ return this[kState].findIndex((entry) => entry.name === name) !== -1
+ }
+
+ set (name, value, filename = undefined) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })
+
+ if (arguments.length === 3 && !isBlobLike(value)) {
+ throw new TypeError(
+ "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'"
+ )
+ }
+
+ // The set(name, value) and set(name, blobValue, filename) method steps
+ // are:
+
+ // 1. Let value be value if given; otherwise blobValue.
+
+ name = webidl.converters.USVString(name)
+ value = isBlobLike(value)
+ ? webidl.converters.Blob(value, { strict: false })
+ : webidl.converters.USVString(value)
+ filename = arguments.length === 3
+ ? toUSVString(filename)
+ : undefined
+
+ // 2. Let entry be the result of creating an entry with name, value, and
+ // filename if given.
+ const entry = makeEntry(name, value, filename)
+
+ // 3. If there are entries in this’s entry list whose name is name, then
+ // replace the first such entry with entry and remove the others.
+ const idx = this[kState].findIndex((entry) => entry.name === name)
+ if (idx !== -1) {
+ this[kState] = [
+ ...this[kState].slice(0, idx),
+ entry,
+ ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)
+ ]
+ } else {
+ // 4. Otherwise, append entry to this’s entry list.
+ this[kState].push(entry)
+ }
+ }
+
+ entries () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'key+value'
+ )
+ }
+
+ keys () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'key'
+ )
+ }
+
+ values () {
+ webidl.brandCheck(this, FormData)
+
+ return makeIterator(
+ () => this[kState].map(pair => [pair.name, pair.value]),
+ 'FormData',
+ 'value'
+ )
+ }
+
+ /**
+ * @param {(value: string, key: string, self: FormData) => void} callbackFn
+ * @param {unknown} thisArg
+ */
+ forEach (callbackFn, thisArg = globalThis) {
+ webidl.brandCheck(this, FormData)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })
+
+ if (typeof callbackFn !== 'function') {
+ throw new TypeError(
+ "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
+ )
+ }
+
+ for (const [key, value] of this) {
+ callbackFn.apply(thisArg, [value, key, this])
+ }
+ }
+}
+
+FormData.prototype[Symbol.iterator] = FormData.prototype.entries
+
+Object.defineProperties(FormData.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'FormData',
+ configurable: true
+ }
+})
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
+ * @param {string} name
+ * @param {string|Blob} value
+ * @param {?string} filename
+ * @returns
+ */
+function makeEntry (name, value, filename) {
+ // 1. Set name to the result of converting name into a scalar value string.
+ // "To convert a string into a scalar value string, replace any surrogates
+ // with U+FFFD."
+ // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end
+ name = Buffer.from(name).toString('utf8')
+
+ // 2. If value is a string, then set value to the result of converting
+ // value into a scalar value string.
+ if (typeof value === 'string') {
+ value = Buffer.from(value).toString('utf8')
+ } else {
+ // 3. Otherwise:
+
+ // 1. If value is not a File object, then set value to a new File object,
+ // representing the same bytes, whose name attribute value is "blob"
+ if (!isFileLike(value)) {
+ value = value instanceof Blob
+ ? new File([value], 'blob', { type: value.type })
+ : new FileLike(value, 'blob', { type: value.type })
+ }
+
+ // 2. If filename is given, then set value to a new File object,
+ // representing the same bytes, whose name attribute is filename.
+ if (filename !== undefined) {
+ /** @type {FilePropertyBag} */
+ const options = {
+ type: value.type,
+ lastModified: value.lastModified
+ }
+
+ value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile
+ ? new File([value], filename, options)
+ : new FileLike(value, filename, options)
+ }
+ }
+
+ // 4. Return an entry whose name is name and whose value is value.
+ return { name, value }
+}
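+
+// Illustrative usage sketch (not part of the upstream undici sources): append()
+// keeps duplicate names, while set() replaces every existing entry of that name.
+//
+//   const fd = new FormData()
+//   fd.append('x', '1')
+//   fd.append('x', '2')
+//   fd.getAll('x') // ['1', '2']
+//   fd.set('x', '3')
+//   fd.getAll('x') // ['3']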
+
+module.exports = { FormData }
+
+
+/***/ }),
+
+/***/ 1246:
+/***/ ((module) => {
+
+"use strict";
+
+
+// In case of breaking changes, increase the version
+// number to avoid conflicts.
+const globalOrigin = Symbol.for('undici.globalOrigin.1')
+
+function getGlobalOrigin () {
+ return globalThis[globalOrigin]
+}
+
+function setGlobalOrigin (newOrigin) {
+ if (newOrigin === undefined) {
+ Object.defineProperty(globalThis, globalOrigin, {
+ value: undefined,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+
+ return
+ }
+
+ const parsedURL = new URL(newOrigin)
+
+ if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {
+ throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)
+ }
+
+ Object.defineProperty(globalThis, globalOrigin, {
+ value: parsedURL,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+}
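+
+// Illustrative usage sketch (not part of the upstream undici sources): only
+// http(s) origins are accepted, and passing undefined clears the stored origin.
+//
+//   setGlobalOrigin('https://example.com')
+//   getGlobalOrigin().origin   // 'https://example.com'
+//   setGlobalOrigin(undefined) // getGlobalOrigin() is now undefined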
+
+module.exports = {
+ getGlobalOrigin,
+ setGlobalOrigin
+}
+
+
+/***/ }),
+
+/***/ 554:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+
+
+const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
+const { kGuard } = __nccwpck_require__(5861)
+const { kEnumerableProperty } = __nccwpck_require__(3983)
+const {
+ makeIterator,
+ isValidHeaderName,
+ isValidHeaderValue
+} = __nccwpck_require__(2538)
+const { webidl } = __nccwpck_require__(1744)
+const assert = __nccwpck_require__(9491)
+
+const kHeadersMap = Symbol('headers map')
+const kHeadersSortedMap = Symbol('headers map sorted')
+
+/**
+ * @param {number} code
+ */
+function isHTTPWhiteSpaceCharCode (code) {
+ return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
+ * @param {string} potentialValue
+ */
+function headerValueNormalize (potentialValue) {
+ // To normalize a byte sequence potentialValue, remove
+ // any leading and trailing HTTP whitespace bytes from
+ // potentialValue.
+ let i = 0; let j = potentialValue.length
+
+ while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j
+ while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i
+
+ return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)
+}
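+
+// Illustrative usage sketch (not part of the upstream undici sources):
+// normalization trims only HTTP whitespace (tab, LF, CR, space) from both ends.
+//
+//   headerValueNormalize('  gzip\r\n ') // 'gzip'
+//   headerValueNormalize('a  b')        // 'a  b' (inner whitespace is kept)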
+
+function fill (headers, object) {
+ // To fill a Headers object headers with a given object object, run these steps:
+
+ // 1. If object is a sequence, then for each header in object:
+ // Note: webidl conversion to array has already been done.
+ if (Array.isArray(object)) {
+ for (let i = 0; i < object.length; ++i) {
+ const header = object[i]
+ // 1. If header does not contain exactly two items, then throw a TypeError.
+ if (header.length !== 2) {
+ throw webidl.errors.exception({
+ header: 'Headers constructor',
+ message: `expected name/value pair to be length 2, found ${header.length}.`
+ })
+ }
+
+ // 2. Append (header’s first item, header’s second item) to headers.
+ appendHeader(headers, header[0], header[1])
+ }
+ } else if (typeof object === 'object' && object !== null) {
+ // Note: null should throw
+
+ // 2. Otherwise, object is a record, then for each key → value in object,
+ // append (key, value) to headers
+ const keys = Object.keys(object)
+ for (let i = 0; i < keys.length; ++i) {
+ appendHeader(headers, keys[i], object[keys[i]])
+ }
+ } else {
+ throw webidl.errors.conversionFailed({
+ prefix: 'Headers constructor',
+ argument: 'Argument 1',
+      types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+ })
+ }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-headers-append
+ */
+function appendHeader (headers, name, value) {
+ // 1. Normalize value.
+ value = headerValueNormalize(value)
+
+ // 2. If name is not a header name or value is not a
+ // header value, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.append',
+ value: name,
+ type: 'header name'
+ })
+ } else if (!isValidHeaderValue(value)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.append',
+ value,
+ type: 'header value'
+ })
+ }
+
+ // 3. If headers’s guard is "immutable", then throw a TypeError.
+ // 4. Otherwise, if headers’s guard is "request" and name is a
+ // forbidden header name, return.
+ // Note: undici does not implement forbidden header names
+ if (headers[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (headers[kGuard] === 'request-no-cors') {
+ // 5. Otherwise, if headers’s guard is "request-no-cors":
+ // TODO
+ }
+
+ // 6. Otherwise, if headers’s guard is "response" and name is a
+ // forbidden response-header name, return.
+
+ // 7. Append (name, value) to headers’s header list.
+ return headers[kHeadersList].append(name, value)
+
+ // 8. If headers’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from headers
+}
+
+class HeadersList {
+ /** @type {[string, string][]|null} */
+ cookies = null
+
+ constructor (init) {
+ if (init instanceof HeadersList) {
+ this[kHeadersMap] = new Map(init[kHeadersMap])
+ this[kHeadersSortedMap] = init[kHeadersSortedMap]
+ this.cookies = init.cookies === null ? null : [...init.cookies]
+ } else {
+ this[kHeadersMap] = new Map(init)
+ this[kHeadersSortedMap] = null
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#header-list-contains
+ contains (name) {
+ // A header list list contains a header name name if list
+ // contains a header whose name is a byte-case-insensitive
+ // match for name.
+ name = name.toLowerCase()
+
+ return this[kHeadersMap].has(name)
+ }
+
+ clear () {
+ this[kHeadersMap].clear()
+ this[kHeadersSortedMap] = null
+ this.cookies = null
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-append
+ append (name, value) {
+ this[kHeadersSortedMap] = null
+
+ // 1. If list contains name, then set name to the first such
+ // header’s name.
+ const lowercaseName = name.toLowerCase()
+ const exists = this[kHeadersMap].get(lowercaseName)
+
+ // 2. Append (name, value) to list.
+ if (exists) {
+ const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
+ this[kHeadersMap].set(lowercaseName, {
+ name: exists.name,
+ value: `${exists.value}${delimiter}${value}`
+ })
+ } else {
+ this[kHeadersMap].set(lowercaseName, { name, value })
+ }
+
+ if (lowercaseName === 'set-cookie') {
+ this.cookies ??= []
+ this.cookies.push(value)
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-set
+ set (name, value) {
+ this[kHeadersSortedMap] = null
+ const lowercaseName = name.toLowerCase()
+
+ if (lowercaseName === 'set-cookie') {
+ this.cookies = [value]
+ }
+
+ // 1. If list contains name, then set the value of
+ // the first such header to value and remove the
+ // others.
+ // 2. Otherwise, append header (name, value) to list.
+ this[kHeadersMap].set(lowercaseName, { name, value })
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-delete
+ delete (name) {
+ this[kHeadersSortedMap] = null
+
+ name = name.toLowerCase()
+
+ if (name === 'set-cookie') {
+ this.cookies = null
+ }
+
+ this[kHeadersMap].delete(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-get
+ get (name) {
+ const value = this[kHeadersMap].get(name.toLowerCase())
+
+ // 1. If list does not contain name, then return null.
+ // 2. Return the values of all headers in list whose name
+ // is a byte-case-insensitive match for name,
+ // separated from each other by 0x2C 0x20, in order.
+ return value === undefined ? null : value.value
+ }
+
+ * [Symbol.iterator] () {
+ // use the lowercased name
+ for (const [name, { value }] of this[kHeadersMap]) {
+ yield [name, value]
+ }
+ }
+
+ get entries () {
+ const headers = {}
+
+ if (this[kHeadersMap].size) {
+ for (const { name, value } of this[kHeadersMap].values()) {
+ headers[name] = value
+ }
+ }
+
+ return headers
+ }
+}
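+
+// Illustrative usage sketch (not part of the upstream undici sources): repeated
+// headers are combined with ', ' except Cookie, which uses '; ', and Set-Cookie
+// values are also tracked on `cookies` so getSetCookie() can return them individually.
+//
+//   const list = new HeadersList()
+//   list.append('Accept', 'text/html')
+//   list.append('accept', 'application/json')
+//   list.get('accept') // 'text/html, application/json'
+//   list.append('Set-Cookie', 'a=1')
+//   list.append('Set-Cookie', 'b=2')
+//   list.cookies       // ['a=1', 'b=2']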
+
+// https://fetch.spec.whatwg.org/#headers-class
+class Headers {
+ constructor (init = undefined) {
+ if (init === kConstruct) {
+ return
+ }
+ this[kHeadersList] = new HeadersList()
+
+ // The new Headers(init) constructor steps are:
+
+ // 1. Set this’s guard to "none".
+ this[kGuard] = 'none'
+
+ // 2. If init is given, then fill this with init.
+ if (init !== undefined) {
+ init = webidl.converters.HeadersInit(init)
+ fill(this, init)
+ }
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-append
+ append (name, value) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })
+
+ name = webidl.converters.ByteString(name)
+ value = webidl.converters.ByteString(value)
+
+ return appendHeader(this, name, value)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-delete
+ delete (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.delete',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. If this’s guard is "immutable", then throw a TypeError.
+ // 3. Otherwise, if this’s guard is "request" and name is a
+ // forbidden header name, return.
+ // 4. Otherwise, if this’s guard is "request-no-cors", name
+ // is not a no-CORS-safelisted request-header name, and
+ // name is not a privileged no-CORS request-header name,
+ // return.
+ // 5. Otherwise, if this’s guard is "response" and name is
+ // a forbidden response-header name, return.
+ // Note: undici does not implement forbidden header names
+ if (this[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (this[kGuard] === 'request-no-cors') {
+ // TODO
+ }
+
+ // 6. If this’s header list does not contain name, then
+ // return.
+ if (!this[kHeadersList].contains(name)) {
+ return
+ }
+
+ // 7. Delete name from this’s header list.
+ // 8. If this’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from this.
+ this[kHeadersList].delete(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-get
+ get (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.get',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. Return the result of getting name from this’s header
+ // list.
+ return this[kHeadersList].get(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-has
+ has (name) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })
+
+ name = webidl.converters.ByteString(name)
+
+ // 1. If name is not a header name, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.has',
+ value: name,
+ type: 'header name'
+ })
+ }
+
+ // 2. Return true if this’s header list contains name;
+ // otherwise false.
+ return this[kHeadersList].contains(name)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-set
+ set (name, value) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })
+
+ name = webidl.converters.ByteString(name)
+ value = webidl.converters.ByteString(value)
+
+ // 1. Normalize value.
+ value = headerValueNormalize(value)
+
+ // 2. If name is not a header name or value is not a
+ // header value, then throw a TypeError.
+ if (!isValidHeaderName(name)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.set',
+ value: name,
+ type: 'header name'
+ })
+ } else if (!isValidHeaderValue(value)) {
+ throw webidl.errors.invalidArgument({
+ prefix: 'Headers.set',
+ value,
+ type: 'header value'
+ })
+ }
+
+ // 3. If this’s guard is "immutable", then throw a TypeError.
+ // 4. Otherwise, if this’s guard is "request" and name is a
+ // forbidden header name, return.
+ // 5. Otherwise, if this’s guard is "request-no-cors" and
+ // name/value is not a no-CORS-safelisted request-header,
+ // return.
+ // 6. Otherwise, if this’s guard is "response" and name is a
+ // forbidden response-header name, return.
+ // Note: undici does not implement forbidden header names
+ if (this[kGuard] === 'immutable') {
+ throw new TypeError('immutable')
+ } else if (this[kGuard] === 'request-no-cors') {
+ // TODO
+ }
+
+ // 7. Set (name, value) in this’s header list.
+ // 8. If this’s guard is "request-no-cors", then remove
+ // privileged no-CORS request headers from this
+ this[kHeadersList].set(name, value)
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
+ getSetCookie () {
+ webidl.brandCheck(this, Headers)
+
+ // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
+ // 2. Return the values of all headers in this’s header list whose name is
+ // a byte-case-insensitive match for `Set-Cookie`, in order.
+
+ const list = this[kHeadersList].cookies
+
+ if (list) {
+ return [...list]
+ }
+
+ return []
+ }
+
+ // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
+ get [kHeadersSortedMap] () {
+ if (this[kHeadersList][kHeadersSortedMap]) {
+ return this[kHeadersList][kHeadersSortedMap]
+ }
+
+ // 1. Let headers be an empty list of headers with the key being the name
+ // and value the value.
+ const headers = []
+
+ // 2. Let names be the result of convert header names to a sorted-lowercase
+ // set with all the names of the headers in list.
+ const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
+ const cookies = this[kHeadersList].cookies
+
+ // 3. For each name of names:
+ for (let i = 0; i < names.length; ++i) {
+ const [name, value] = names[i]
+ // 1. If name is `set-cookie`, then:
+ if (name === 'set-cookie') {
+ // 1. Let values be a list of all values of headers in list whose name
+ // is a byte-case-insensitive match for name, in order.
+
+ // 2. For each value of values:
+ // 1. Append (name, value) to headers.
+ for (let j = 0; j < cookies.length; ++j) {
+ headers.push([name, cookies[j]])
+ }
+ } else {
+ // 2. Otherwise:
+
+ // 1. Let value be the result of getting name from list.
+
+ // 2. Assert: value is non-null.
+ assert(value !== null)
+
+ // 3. Append (name, value) to headers.
+ headers.push([name, value])
+ }
+ }
+
+ this[kHeadersList][kHeadersSortedMap] = headers
+
+ // 4. Return headers.
+ return headers
+ }
+
+ keys () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers', 'key')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'key'
+ )
+ }
+
+ values () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers', 'value')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'value'
+ )
+ }
+
+ entries () {
+ webidl.brandCheck(this, Headers)
+
+ if (this[kGuard] === 'immutable') {
+ const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers', 'key+value')
+ }
+
+ return makeIterator(
+ () => [...this[kHeadersSortedMap].values()],
+ 'Headers',
+ 'key+value'
+ )
+ }
+
+ /**
+ * @param {(value: string, key: string, self: Headers) => void} callbackFn
+ * @param {unknown} thisArg
+ */
+ forEach (callbackFn, thisArg = globalThis) {
+ webidl.brandCheck(this, Headers)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })
+
+ if (typeof callbackFn !== 'function') {
+ throw new TypeError(
+ "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
+ )
+ }
+
+ for (const [key, value] of this) {
+ callbackFn.apply(thisArg, [value, key, this])
+ }
+ }
+
+ [Symbol.for('nodejs.util.inspect.custom')] () {
+ webidl.brandCheck(this, Headers)
+
+ return this[kHeadersList]
+ }
+}
+
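+// Iterating a Headers instance (for...of, spread, etc.) goes through entries(), yielding sorted, combined [name, value] pairs.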
+Headers.prototype[Symbol.iterator] = Headers.prototype.entries
+
+Object.defineProperties(Headers.prototype, {
+ append: kEnumerableProperty,
+ delete: kEnumerableProperty,
+ get: kEnumerableProperty,
+ has: kEnumerableProperty,
+ set: kEnumerableProperty,
+ getSetCookie: kEnumerableProperty,
+ keys: kEnumerableProperty,
+ values: kEnumerableProperty,
+ entries: kEnumerableProperty,
+ forEach: kEnumerableProperty,
+ [Symbol.iterator]: { enumerable: false },
+ [Symbol.toStringTag]: {
+ value: 'Headers',
+ configurable: true
+ }
+})
+
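+// Converter for the Headers constructor's init argument: iterables are treated as a sequence of [name, value] pairs, any other object as a record of string keys and values.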
+webidl.converters.HeadersInit = function (V) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (V[Symbol.iterator]) {
+      return webidl.converters['sequence<sequence<ByteString>>'](V)
+ }
+
+    return webidl.converters['record<ByteString, ByteString>'](V)
+ }
+
+ throw webidl.errors.conversionFailed({
+ prefix: 'Headers constructor',
+ argument: 'Argument 1',
+    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+ })
+}
+
+module.exports = {
+ fill,
+ Headers,
+ HeadersList
+}
+
+
+/***/ }),
+
+/***/ 4881:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+
+
+const {
+ Response,
+ makeNetworkError,
+ makeAppropriateNetworkError,
+ filterResponse,
+ makeResponse
+} = __nccwpck_require__(7823)
+const { Headers } = __nccwpck_require__(554)
+const { Request, makeRequest } = __nccwpck_require__(8359)
+const zlib = __nccwpck_require__(9796)
+const {
+ bytesMatch,
+ makePolicyContainer,
+ clonePolicyContainer,
+ requestBadPort,
+ TAOCheck,
+ appendRequestOriginHeader,
+ responseLocationURL,
+ requestCurrentURL,
+ setRequestReferrerPolicyOnRedirect,
+ tryUpgradeRequestToAPotentiallyTrustworthyURL,
+ createOpaqueTimingInfo,
+ appendFetchMetadata,
+ corsCheck,
+ crossOriginResourcePolicyCheck,
+ determineRequestsReferrer,
+ coarsenedSharedCurrentTime,
+ createDeferredPromise,
+ isBlobLike,
+ sameOrigin,
+ isCancelled,
+ isAborted,
+ isErrorLike,
+ fullyReadBody,
+ readableStreamClose,
+ isomorphicEncode,
+ urlIsLocal,
+ urlIsHttpHttpsScheme,
+ urlHasHttpsScheme
+} = __nccwpck_require__(2538)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861)
+const assert = __nccwpck_require__(9491)
+const { safelyExtractBody } = __nccwpck_require__(1472)
+const {
+ redirectStatusSet,
+ nullBodyStatus,
+ safeMethodsSet,
+ requestBodyHeader,
+ subresourceSet,
+ DOMException
+} = __nccwpck_require__(1037)
+const { kHeadersList } = __nccwpck_require__(2785)
+const EE = __nccwpck_require__(2361)
+const { Readable, pipeline } = __nccwpck_require__(2781)
+const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3983)
+const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(685)
+const { TransformStream } = __nccwpck_require__(5356)
+const { getGlobalDispatcher } = __nccwpck_require__(1892)
+const { webidl } = __nccwpck_require__(1744)
+const { STATUS_CODES } = __nccwpck_require__(3685)
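+// Methods that keep their verb when a 303 redirect is followed; everything else is rewritten to GET (see httpRedirectFetch below).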
+const GET_OR_HEAD = ['GET', 'HEAD']
+
+/** @type {import('buffer').resolveObjectURL} */
+let resolveObjectURL
+let ReadableStream = globalThis.ReadableStream
+
+class Fetch extends EE {
+ constructor (dispatcher) {
+ super()
+
+ this.dispatcher = dispatcher
+ this.connection = null
+ this.dump = false
+ this.state = 'ongoing'
+ // 2 terminated listeners get added per request,
+ // but only 1 gets removed. If there are 20 redirects,
+ // 21 listeners will be added.
+ // See https://github.com/nodejs/undici/issues/1711
+ // TODO (fix): Find and fix root cause for leaked listener.
+ this.setMaxListeners(21)
+ }
+
+ terminate (reason) {
+ if (this.state !== 'ongoing') {
+ return
+ }
+
+ this.state = 'terminated'
+ this.connection?.destroy(reason)
+ this.emit('terminated', reason)
+ }
+
+ // https://fetch.spec.whatwg.org/#fetch-controller-abort
+ abort (error) {
+ if (this.state !== 'ongoing') {
+ return
+ }
+
+ // 1. Set controller’s state to "aborted".
+ this.state = 'aborted'
+
+ // 2. Let fallbackError be an "AbortError" DOMException.
+ // 3. Set error to fallbackError if it is not given.
+ if (!error) {
+ error = new DOMException('The operation was aborted.', 'AbortError')
+ }
+
+ // 4. Let serializedError be StructuredSerialize(error).
+ // If that threw an exception, catch it, and let
+ // serializedError be StructuredSerialize(fallbackError).
+
+ // 5. Set controller’s serialized abort reason to serializedError.
+ this.serializedAbortReason = error
+
+ this.connection?.destroy(error)
+ this.emit('terminated', error)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-method
+function fetch (input, init = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })
+
+ // 1. Let p be a new promise.
+ const p = createDeferredPromise()
+
+ // 2. Let requestObject be the result of invoking the initial value of
+ // Request as constructor with input and init as arguments. If this throws
+ // an exception, reject p with it and return p.
+ let requestObject
+
+ try {
+ requestObject = new Request(input, init)
+ } catch (e) {
+ p.reject(e)
+ return p.promise
+ }
+
+ // 3. Let request be requestObject’s request.
+ const request = requestObject[kState]
+
+ // 4. If requestObject’s signal’s aborted flag is set, then:
+ if (requestObject.signal.aborted) {
+ // 1. Abort the fetch() call with p, request, null, and
+ // requestObject’s signal’s abort reason.
+ abortFetch(p, request, null, requestObject.signal.reason)
+
+ // 2. Return p.
+ return p.promise
+ }
+
+ // 5. Let globalObject be request’s client’s global object.
+ const globalObject = request.client.globalObject
+
+ // 6. If globalObject is a ServiceWorkerGlobalScope object, then set
+ // request’s service-workers mode to "none".
+ if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {
+ request.serviceWorkers = 'none'
+ }
+
+ // 7. Let responseObject be null.
+ let responseObject = null
+
+ // 8. Let relevantRealm be this’s relevant Realm.
+ const relevantRealm = null
+
+ // 9. Let locallyAborted be false.
+ let locallyAborted = false
+
+ // 10. Let controller be null.
+ let controller = null
+
+ // 11. Add the following abort steps to requestObject’s signal:
+ addAbortListener(
+ requestObject.signal,
+ () => {
+ // 1. Set locallyAborted to true.
+ locallyAborted = true
+
+ // 2. Assert: controller is non-null.
+ assert(controller != null)
+
+ // 3. Abort controller with requestObject’s signal’s abort reason.
+ controller.abort(requestObject.signal.reason)
+
+ // 4. Abort the fetch() call with p, request, responseObject,
+ // and requestObject’s signal’s abort reason.
+ abortFetch(p, request, responseObject, requestObject.signal.reason)
+ }
+ )
+
+ // 12. Let handleFetchDone given response response be to finalize and
+ // report timing with response, globalObject, and "fetch".
+ const handleFetchDone = (response) =>
+ finalizeAndReportTiming(response, 'fetch')
+
+ // 13. Set controller to the result of calling fetch given request,
+ // with processResponseEndOfBody set to handleFetchDone, and processResponse
+ // given response being these substeps:
+
+ const processResponse = (response) => {
+ // 1. If locallyAborted is true, terminate these substeps.
+ if (locallyAborted) {
+ return Promise.resolve()
+ }
+
+ // 2. If response’s aborted flag is set, then:
+ if (response.aborted) {
+ // 1. Let deserializedError be the result of deserialize a serialized
+ // abort reason given controller’s serialized abort reason and
+ // relevantRealm.
+
+ // 2. Abort the fetch() call with p, request, responseObject, and
+ // deserializedError.
+
+ abortFetch(p, request, responseObject, controller.serializedAbortReason)
+ return Promise.resolve()
+ }
+
+ // 3. If response is a network error, then reject p with a TypeError
+ // and terminate these substeps.
+ if (response.type === 'error') {
+ p.reject(
+ Object.assign(new TypeError('fetch failed'), { cause: response.error })
+ )
+ return Promise.resolve()
+ }
+
+ // 4. Set responseObject to the result of creating a Response object,
+ // given response, "immutable", and relevantRealm.
+ responseObject = new Response()
+ responseObject[kState] = response
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kHeadersList] = response.headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 5. Resolve p with responseObject.
+ p.resolve(responseObject)
+ }
+
+ controller = fetching({
+ request,
+ processResponseEndOfBody: handleFetchDone,
+ processResponse,
+ dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici
+ })
+
+ // 14. Return p.
+ return p.promise
+}
+
+// https://fetch.spec.whatwg.org/#finalize-and-report-timing
+function finalizeAndReportTiming (response, initiatorType = 'other') {
+ // 1. If response is an aborted network error, then return.
+ if (response.type === 'error' && response.aborted) {
+ return
+ }
+
+ // 2. If response’s URL list is null or empty, then return.
+ if (!response.urlList?.length) {
+ return
+ }
+
+ // 3. Let originalURL be response’s URL list[0].
+ const originalURL = response.urlList[0]
+
+ // 4. Let timingInfo be response’s timing info.
+ let timingInfo = response.timingInfo
+
+ // 5. Let cacheState be response’s cache state.
+ let cacheState = response.cacheState
+
+ // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return.
+ if (!urlIsHttpHttpsScheme(originalURL)) {
+ return
+ }
+
+ // 7. If timingInfo is null, then return.
+ if (timingInfo === null) {
+ return
+ }
+
+ // 8. If response’s timing allow passed flag is not set, then:
+ if (!response.timingAllowPassed) {
+    // 1. Set timingInfo to the result of creating an opaque timing info for timingInfo.
+ timingInfo = createOpaqueTimingInfo({
+ startTime: timingInfo.startTime
+ })
+
+ // 2. Set cacheState to the empty string.
+ cacheState = ''
+ }
+
+ // 9. Set timingInfo’s end time to the coarsened shared current time
+ // given global’s relevant settings object’s cross-origin isolated
+ // capability.
+ // TODO: given global’s relevant settings object’s cross-origin isolated
+ // capability?
+ timingInfo.endTime = coarsenedSharedCurrentTime()
+
+ // 10. Set response’s timing info to timingInfo.
+ response.timingInfo = timingInfo
+
+ // 11. Mark resource timing for timingInfo, originalURL, initiatorType,
+ // global, and cacheState.
+ markResourceTiming(
+ timingInfo,
+ originalURL,
+ initiatorType,
+ globalThis,
+ cacheState
+ )
+}
+
+// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
+function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
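+  // Note: the version guard below only marks resource timing on Node.js 18.2.0 and newer,
+  // where performance.markResourceTiming() is available.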
+ if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {
+ performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#abort-fetch
+function abortFetch (p, request, responseObject, error) {
+ // Note: AbortSignal.reason was added in node v17.2.0
+ // which would give us an undefined error to reject with.
+ // Remove this once node v16 is no longer supported.
+ if (!error) {
+ error = new DOMException('The operation was aborted.', 'AbortError')
+ }
+
+ // 1. Reject promise with error.
+ p.reject(error)
+
+ // 2. If request’s body is not null and is readable, then cancel request’s
+ // body with error.
+ if (request.body != null && isReadable(request.body?.stream)) {
+ request.body.stream.cancel(error).catch((err) => {
+ if (err.code === 'ERR_INVALID_STATE') {
+ // Node bug?
+ return
+ }
+ throw err
+ })
+ }
+
+ // 3. If responseObject is null, then return.
+ if (responseObject == null) {
+ return
+ }
+
+ // 4. Let response be responseObject’s response.
+ const response = responseObject[kState]
+
+ // 5. If response’s body is not null and is readable, then error response’s
+ // body with error.
+ if (response.body != null && isReadable(response.body?.stream)) {
+ response.body.stream.cancel(error).catch((err) => {
+ if (err.code === 'ERR_INVALID_STATE') {
+ // Node bug?
+ return
+ }
+ throw err
+ })
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetching
+function fetching ({
+ request,
+ processRequestBodyChunkLength,
+ processRequestEndOfBody,
+ processResponse,
+ processResponseEndOfBody,
+ processResponseConsumeBody,
+ useParallelQueue = false,
+ dispatcher // undici
+}) {
+ // 1. Let taskDestination be null.
+ let taskDestination = null
+
+ // 2. Let crossOriginIsolatedCapability be false.
+ let crossOriginIsolatedCapability = false
+
+ // 3. If request’s client is non-null, then:
+ if (request.client != null) {
+ // 1. Set taskDestination to request’s client’s global object.
+ taskDestination = request.client.globalObject
+
+ // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin
+ // isolated capability.
+ crossOriginIsolatedCapability =
+ request.client.crossOriginIsolatedCapability
+ }
+
+ // 4. If useParallelQueue is true, then set taskDestination to the result of
+ // starting a new parallel queue.
+ // TODO
+
+ // 5. Let timingInfo be a new fetch timing info whose start time and
+ // post-redirect start time are the coarsened shared current time given
+ // crossOriginIsolatedCapability.
+  const currentTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)
+  const timingInfo = createOpaqueTimingInfo({
+    startTime: currentTime
+ })
+
+ // 6. Let fetchParams be a new fetch params whose
+ // request is request,
+ // timing info is timingInfo,
+ // process request body chunk length is processRequestBodyChunkLength,
+ // process request end-of-body is processRequestEndOfBody,
+ // process response is processResponse,
+ // process response consume body is processResponseConsumeBody,
+ // process response end-of-body is processResponseEndOfBody,
+ // task destination is taskDestination,
+ // and cross-origin isolated capability is crossOriginIsolatedCapability.
+ const fetchParams = {
+ controller: new Fetch(dispatcher),
+ request,
+ timingInfo,
+ processRequestBodyChunkLength,
+ processRequestEndOfBody,
+ processResponse,
+ processResponseConsumeBody,
+ processResponseEndOfBody,
+ taskDestination,
+ crossOriginIsolatedCapability
+ }
+
+ // 7. If request’s body is a byte sequence, then set request’s body to
+ // request’s body as a body.
+ // NOTE: Since fetching is only called from fetch, body should already be
+ // extracted.
+ assert(!request.body || request.body.stream)
+
+ // 8. If request’s window is "client", then set request’s window to request’s
+ // client, if request’s client’s global object is a Window object; otherwise
+ // "no-window".
+ if (request.window === 'client') {
+ // TODO: What if request.client is null?
+ request.window =
+ request.client?.globalObject?.constructor?.name === 'Window'
+ ? request.client
+ : 'no-window'
+ }
+
+ // 9. If request’s origin is "client", then set request’s origin to request’s
+ // client’s origin.
+ if (request.origin === 'client') {
+ // TODO: What if request.client is null?
+ request.origin = request.client?.origin
+ }
+
+ // 10. If all of the following conditions are true:
+ // TODO
+
+ // 11. If request’s policy container is "client", then:
+ if (request.policyContainer === 'client') {
+ // 1. If request’s client is non-null, then set request’s policy
+ // container to a clone of request’s client’s policy container. [HTML]
+ if (request.client != null) {
+ request.policyContainer = clonePolicyContainer(
+ request.client.policyContainer
+ )
+ } else {
+ // 2. Otherwise, set request’s policy container to a new policy
+ // container.
+ request.policyContainer = makePolicyContainer()
+ }
+ }
+
+ // 12. If request’s header list does not contain `Accept`, then:
+ if (!request.headersList.contains('accept')) {
+ // 1. Let value be `*/*`.
+ const value = '*/*'
+
+ // 2. A user agent should set value to the first matching statement, if
+ // any, switching on request’s destination:
+ // "document"
+ // "frame"
+ // "iframe"
+ // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`
+ // "image"
+ // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`
+ // "style"
+ // `text/css,*/*;q=0.1`
+ // TODO
+
+ // 3. Append `Accept`/value to request’s header list.
+ request.headersList.append('accept', value)
+ }
+
+ // 13. If request’s header list does not contain `Accept-Language`, then
+ // user agents should append `Accept-Language`/an appropriate value to
+ // request’s header list.
+ if (!request.headersList.contains('accept-language')) {
+ request.headersList.append('accept-language', '*')
+ }
+
+ // 14. If request’s priority is null, then use request’s initiator and
+ // destination appropriately in setting request’s priority to a
+ // user-agent-defined object.
+ if (request.priority === null) {
+ // TODO
+ }
+
+ // 15. If request is a subresource request, then:
+ if (subresourceSet.has(request.destination)) {
+ // TODO
+ }
+
+ // 16. Run main fetch given fetchParams.
+ mainFetch(fetchParams)
+ .catch(err => {
+ fetchParams.controller.terminate(err)
+ })
+
+  // 17. Return fetchParams’s controller
+ return fetchParams.controller
+}
+
+// https://fetch.spec.whatwg.org/#concept-main-fetch
+async function mainFetch (fetchParams, recursive = false) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. If request’s local-URLs-only flag is set and request’s current URL is
+ // not local, then set response to a network error.
+ if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
+ response = makeNetworkError('local URLs only')
+ }
+
+ // 4. Run report Content Security Policy violations for request.
+ // TODO
+
+ // 5. Upgrade request to a potentially trustworthy URL, if appropriate.
+ tryUpgradeRequestToAPotentiallyTrustworthyURL(request)
+
+ // 6. If should request be blocked due to a bad port, should fetching request
+ // be blocked as mixed content, or should request be blocked by Content
+ // Security Policy returns blocked, then set response to a network error.
+ if (requestBadPort(request) === 'blocked') {
+ response = makeNetworkError('bad port')
+ }
+ // TODO: should fetching request be blocked as mixed content?
+ // TODO: should request be blocked by Content Security Policy?
+
+ // 7. If request’s referrer policy is the empty string, then set request’s
+ // referrer policy to request’s policy container’s referrer policy.
+ if (request.referrerPolicy === '') {
+ request.referrerPolicy = request.policyContainer.referrerPolicy
+ }
+
+ // 8. If request’s referrer is not "no-referrer", then set request’s
+ // referrer to the result of invoking determine request’s referrer.
+ if (request.referrer !== 'no-referrer') {
+ request.referrer = determineRequestsReferrer(request)
+ }
+
+ // 9. Set request’s current URL’s scheme to "https" if all of the following
+ // conditions are true:
+ // - request’s current URL’s scheme is "http"
+ // - request’s current URL’s host is a domain
+ // - Matching request’s current URL’s host per Known HSTS Host Domain Name
+ // Matching results in either a superdomain match with an asserted
+ // includeSubDomains directive or a congruent match (with or without an
+ // asserted includeSubDomains directive). [HSTS]
+ // TODO
+
+ // 10. If recursive is false, then run the remaining steps in parallel.
+ // TODO
+
+ // 11. If response is null, then set response to the result of running
+ // the steps corresponding to the first matching statement:
+ if (response === null) {
+ response = await (async () => {
+ const currentURL = requestCurrentURL(request)
+
+ if (
+ // - request’s current URL’s origin is same origin with request’s origin,
+ // and request’s response tainting is "basic"
+ (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||
+ // request’s current URL’s scheme is "data"
+ (currentURL.protocol === 'data:') ||
+ // - request’s mode is "navigate" or "websocket"
+ (request.mode === 'navigate' || request.mode === 'websocket')
+ ) {
+ // 1. Set request’s response tainting to "basic".
+ request.responseTainting = 'basic'
+
+ // 2. Return the result of running scheme fetch given fetchParams.
+ return await schemeFetch(fetchParams)
+ }
+
+ // request’s mode is "same-origin"
+ if (request.mode === 'same-origin') {
+ // 1. Return a network error.
+ return makeNetworkError('request mode cannot be "same-origin"')
+ }
+
+ // request’s mode is "no-cors"
+ if (request.mode === 'no-cors') {
+ // 1. If request’s redirect mode is not "follow", then return a network
+ // error.
+ if (request.redirect !== 'follow') {
+ return makeNetworkError(
+ 'redirect mode cannot be "follow" for "no-cors" request'
+ )
+ }
+
+ // 2. Set request’s response tainting to "opaque".
+ request.responseTainting = 'opaque'
+
+ // 3. Return the result of running scheme fetch given fetchParams.
+ return await schemeFetch(fetchParams)
+ }
+
+ // request’s current URL’s scheme is not an HTTP(S) scheme
+ if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
+ // Return a network error.
+ return makeNetworkError('URL scheme must be a HTTP(S) scheme')
+ }
+
+ // - request’s use-CORS-preflight flag is set
+ // - request’s unsafe-request flag is set and either request’s method is
+ // not a CORS-safelisted method or CORS-unsafe request-header names with
+ // request’s header list is not empty
+ // 1. Set request’s response tainting to "cors".
+ // 2. Let corsWithPreflightResponse be the result of running HTTP fetch
+ // given fetchParams and true.
+ // 3. If corsWithPreflightResponse is a network error, then clear cache
+ // entries using request.
+ // 4. Return corsWithPreflightResponse.
+ // TODO
+
+ // Otherwise
+ // 1. Set request’s response tainting to "cors".
+ request.responseTainting = 'cors'
+
+ // 2. Return the result of running HTTP fetch given fetchParams.
+ return await httpFetch(fetchParams)
+ })()
+ }
+
+ // 12. If recursive is true, then return response.
+ if (recursive) {
+ return response
+ }
+
+ // 13. If response is not a network error and response is not a filtered
+ // response, then:
+ if (response.status !== 0 && !response.internalResponse) {
+ // If request’s response tainting is "cors", then:
+ if (request.responseTainting === 'cors') {
+ // 1. Let headerNames be the result of extracting header list values
+ // given `Access-Control-Expose-Headers` and response’s header list.
+ // TODO
+ // 2. If request’s credentials mode is not "include" and headerNames
+ // contains `*`, then set response’s CORS-exposed header-name list to
+ // all unique header names in response’s header list.
+ // TODO
+ // 3. Otherwise, if headerNames is not null or failure, then set
+ // response’s CORS-exposed header-name list to headerNames.
+ // TODO
+ }
+
+ // Set response to the following filtered response with response as its
+ // internal response, depending on request’s response tainting:
+ if (request.responseTainting === 'basic') {
+ response = filterResponse(response, 'basic')
+ } else if (request.responseTainting === 'cors') {
+ response = filterResponse(response, 'cors')
+ } else if (request.responseTainting === 'opaque') {
+ response = filterResponse(response, 'opaque')
+ } else {
+ assert(false)
+ }
+ }
+
+ // 14. Let internalResponse be response, if response is a network error,
+ // and response’s internal response otherwise.
+ let internalResponse =
+ response.status === 0 ? response : response.internalResponse
+
+ // 15. If internalResponse’s URL list is empty, then set it to a clone of
+ // request’s URL list.
+ if (internalResponse.urlList.length === 0) {
+ internalResponse.urlList.push(...request.urlList)
+ }
+
+ // 16. If request’s timing allow failed flag is unset, then set
+ // internalResponse’s timing allow passed flag.
+ if (!request.timingAllowFailed) {
+ response.timingAllowPassed = true
+ }
+
+ // 17. If response is not a network error and any of the following returns
+ // blocked
+ // - should internalResponse to request be blocked as mixed content
+ // - should internalResponse to request be blocked by Content Security Policy
+ // - should internalResponse to request be blocked due to its MIME type
+ // - should internalResponse to request be blocked due to nosniff
+ // TODO
+
+ // 18. If response’s type is "opaque", internalResponse’s status is 206,
+ // internalResponse’s range-requested flag is set, and request’s header
+ // list does not contain `Range`, then set response and internalResponse
+ // to a network error.
+ if (
+ response.type === 'opaque' &&
+ internalResponse.status === 206 &&
+ internalResponse.rangeRequested &&
+    !request.headersList.contains('range')
+ ) {
+ response = internalResponse = makeNetworkError()
+ }
+
+ // 19. If response is not a network error and either request’s method is
+ // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status,
+ // set internalResponse’s body to null and disregard any enqueuing toward
+ // it (if any).
+ if (
+ response.status !== 0 &&
+ (request.method === 'HEAD' ||
+ request.method === 'CONNECT' ||
+ nullBodyStatus.includes(internalResponse.status))
+ ) {
+ internalResponse.body = null
+ fetchParams.controller.dump = true
+ }
+
+ // 20. If request’s integrity metadata is not the empty string, then:
+ if (request.integrity) {
+ // 1. Let processBodyError be this step: run fetch finale given fetchParams
+ // and a network error.
+ const processBodyError = (reason) =>
+ fetchFinale(fetchParams, makeNetworkError(reason))
+
+ // 2. If request’s response tainting is "opaque", or response’s body is null,
+ // then run processBodyError and abort these steps.
+ if (request.responseTainting === 'opaque' || response.body == null) {
+ processBodyError(response.error)
+ return
+ }
+
+ // 3. Let processBody given bytes be these steps:
+ const processBody = (bytes) => {
+ // 1. If bytes do not match request’s integrity metadata,
+ // then run processBodyError and abort these steps. [SRI]
+ if (!bytesMatch(bytes, request.integrity)) {
+ processBodyError('integrity mismatch')
+ return
+ }
+
+ // 2. Set response’s body to bytes as a body.
+ response.body = safelyExtractBody(bytes)[0]
+
+ // 3. Run fetch finale given fetchParams and response.
+ fetchFinale(fetchParams, response)
+ }
+
+ // 4. Fully read response’s body given processBody and processBodyError.
+ await fullyReadBody(response.body, processBody, processBodyError)
+ } else {
+ // 21. Otherwise, run fetch finale given fetchParams and response.
+ fetchFinale(fetchParams, response)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#concept-scheme-fetch
+// given a fetch params fetchParams
+function schemeFetch (fetchParams) {
+ // Note: since the connection is destroyed on redirect, which sets fetchParams to a
+ // cancelled state, we do not want this condition to trigger *unless* there have been
+ // no redirects. See https://github.com/nodejs/undici/issues/1776
+ // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {
+ return Promise.resolve(makeAppropriateNetworkError(fetchParams))
+ }
+
+ // 2. Let request be fetchParams’s request.
+ const { request } = fetchParams
+
+ const { protocol: scheme } = requestCurrentURL(request)
+
+ // 3. Switch on request’s current URL’s scheme and run the associated steps:
+ switch (scheme) {
+ case 'about:': {
+ // If request’s current URL’s path is the string "blank", then return a new response
+ // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,
+ // and body is the empty byte sequence as a body.
+
+ // Otherwise, return a network error.
+ return Promise.resolve(makeNetworkError('about scheme is not supported'))
+ }
+ case 'blob:': {
+ if (!resolveObjectURL) {
+ resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL)
+ }
+
+ // 1. Let blobURLEntry be request’s current URL’s blob URL entry.
+ const blobURLEntry = requestCurrentURL(request)
+
+ // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56
+ // Buffer.resolveObjectURL does not ignore URL queries.
+ if (blobURLEntry.search.length !== 0) {
+ return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))
+ }
+
+ const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())
+
+ // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s
+ // object is not a Blob object, then return a network error.
+ if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {
+ return Promise.resolve(makeNetworkError('invalid method'))
+ }
+
+ // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object.
+ const bodyWithType = safelyExtractBody(blobURLEntryObject)
+
+ // 4. Let body be bodyWithType’s body.
+ const body = bodyWithType[0]
+
+ // 5. Let length be body’s length, serialized and isomorphic encoded.
+ const length = isomorphicEncode(`${body.length}`)
+
+ // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence.
+ const type = bodyWithType[1] ?? ''
+
+ // 7. Return a new response whose status message is `OK`, header list is
+ // « (`Content-Length`, length), (`Content-Type`, type) », and body is body.
+ const response = makeResponse({
+ statusText: 'OK',
+ headersList: [
+ ['content-length', { name: 'Content-Length', value: length }],
+ ['content-type', { name: 'Content-Type', value: type }]
+ ]
+ })
+
+ response.body = body
+
+ return Promise.resolve(response)
+ }
+ case 'data:': {
+ // 1. Let dataURLStruct be the result of running the
+ // data: URL processor on request’s current URL.
+ const currentURL = requestCurrentURL(request)
+ const dataURLStruct = dataURLProcessor(currentURL)
+
+ // 2. If dataURLStruct is failure, then return a
+ // network error.
+ if (dataURLStruct === 'failure') {
+ return Promise.resolve(makeNetworkError('failed to fetch the data URL'))
+ }
+
+ // 3. Let mimeType be dataURLStruct’s MIME type, serialized.
+ const mimeType = serializeAMimeType(dataURLStruct.mimeType)
+
+ // 4. Return a response whose status message is `OK`,
+ // header list is « (`Content-Type`, mimeType) »,
+ // and body is dataURLStruct’s body as a body.
+ return Promise.resolve(makeResponse({
+ statusText: 'OK',
+ headersList: [
+ ['content-type', { name: 'Content-Type', value: mimeType }]
+ ],
+ body: safelyExtractBody(dataURLStruct.body)[0]
+ }))
+ }
+ case 'file:': {
+ // For now, unfortunate as it is, file URLs are left as an exercise for the reader.
+ // When in doubt, return a network error.
+ return Promise.resolve(makeNetworkError('not implemented... yet...'))
+ }
+ case 'http:':
+ case 'https:': {
+ // Return the result of running HTTP fetch given fetchParams.
+
+ return httpFetch(fetchParams)
+ .catch((err) => makeNetworkError(err))
+ }
+ default: {
+ return Promise.resolve(makeNetworkError('unknown scheme'))
+ }
+ }
+}
+
+// https://fetch.spec.whatwg.org/#finalize-response
+function finalizeResponse (fetchParams, response) {
+ // 1. Set fetchParams’s request’s done flag.
+ fetchParams.request.done = true
+
+  // 2. If fetchParams’s process response done is not null, then queue a fetch
+ // task to run fetchParams’s process response done given response, with
+ // fetchParams’s task destination.
+ if (fetchParams.processResponseDone != null) {
+ queueMicrotask(() => fetchParams.processResponseDone(response))
+ }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-finale
+function fetchFinale (fetchParams, response) {
+ // 1. If response is a network error, then:
+ if (response.type === 'error') {
+ // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ».
+ response.urlList = [fetchParams.request.urlList[0]]
+
+ // 2. Set response’s timing info to the result of creating an opaque timing
+ // info for fetchParams’s timing info.
+ response.timingInfo = createOpaqueTimingInfo({
+ startTime: fetchParams.timingInfo.startTime
+ })
+ }
+
+ // 2. Let processResponseEndOfBody be the following steps:
+ const processResponseEndOfBody = () => {
+ // 1. Set fetchParams’s request’s done flag.
+ fetchParams.request.done = true
+
+ // If fetchParams’s process response end-of-body is not null,
+ // then queue a fetch task to run fetchParams’s process response
+ // end-of-body given response with fetchParams’s task destination.
+ if (fetchParams.processResponseEndOfBody != null) {
+ queueMicrotask(() => fetchParams.processResponseEndOfBody(response))
+ }
+ }
+
+ // 3. If fetchParams’s process response is non-null, then queue a fetch task
+ // to run fetchParams’s process response given response, with fetchParams’s
+ // task destination.
+ if (fetchParams.processResponse != null) {
+ queueMicrotask(() => fetchParams.processResponse(response))
+ }
+
+ // 4. If response’s body is null, then run processResponseEndOfBody.
+ if (response.body == null) {
+ processResponseEndOfBody()
+ } else {
+ // 5. Otherwise:
+
+    // 1. Let transformStream be a new TransformStream.
+
+ // 2. Let identityTransformAlgorithm be an algorithm which, given chunk,
+ // enqueues chunk in transformStream.
+ const identityTransformAlgorithm = (chunk, controller) => {
+ controller.enqueue(chunk)
+ }
+
+ // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm
+ // and flushAlgorithm set to processResponseEndOfBody.
+ const transformStream = new TransformStream({
+ start () {},
+ transform: identityTransformAlgorithm,
+ flush: processResponseEndOfBody
+ }, {
+ size () {
+ return 1
+ }
+ }, {
+ size () {
+ return 1
+ }
+ })
+
+ // 4. Set response’s body to the result of piping response’s body through transformStream.
+ response.body = { stream: response.body.stream.pipeThrough(transformStream) }
+ }
+
+ // 6. If fetchParams’s process response consume body is non-null, then:
+ if (fetchParams.processResponseConsumeBody != null) {
+ // 1. Let processBody given nullOrBytes be this step: run fetchParams’s
+ // process response consume body given response and nullOrBytes.
+ const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes)
+
+ // 2. Let processBodyError be this step: run fetchParams’s process
+ // response consume body given response and failure.
+ const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure)
+
+ // 3. If response’s body is null, then queue a fetch task to run processBody
+ // given null, with fetchParams’s task destination.
+ if (response.body == null) {
+ queueMicrotask(() => processBody(null))
+ } else {
+ // 4. Otherwise, fully read response’s body given processBody, processBodyError,
+ // and fetchParams’s task destination.
+ return fullyReadBody(response.body, processBody, processBodyError)
+ }
+ return Promise.resolve()
+ }
+}
+
+// https://fetch.spec.whatwg.org/#http-fetch
+async function httpFetch (fetchParams) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. Let actualResponse be null.
+ let actualResponse = null
+
+ // 4. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 5. If request’s service-workers mode is "all", then:
+ if (request.serviceWorkers === 'all') {
+ // TODO
+ }
+
+ // 6. If response is null, then:
+ if (response === null) {
+ // 1. If makeCORSPreflight is true and one of these conditions is true:
+ // TODO
+
+ // 2. If request’s redirect mode is "follow", then set request’s
+ // service-workers mode to "none".
+ if (request.redirect === 'follow') {
+ request.serviceWorkers = 'none'
+ }
+
+ // 3. Set response and actualResponse to the result of running
+ // HTTP-network-or-cache fetch given fetchParams.
+ actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)
+
+ // 4. If request’s response tainting is "cors" and a CORS check
+ // for request and response returns failure, then return a network error.
+ if (
+ request.responseTainting === 'cors' &&
+ corsCheck(request, response) === 'failure'
+ ) {
+ return makeNetworkError('cors failure')
+ }
+
+ // 5. If the TAO check for request and response returns failure, then set
+ // request’s timing allow failed flag.
+ if (TAOCheck(request, response) === 'failure') {
+ request.timingAllowFailed = true
+ }
+ }
+
+ // 7. If either request’s response tainting or response’s type
+ // is "opaque", and the cross-origin resource policy check with
+ // request’s origin, request’s client, request’s destination,
+ // and actualResponse returns blocked, then return a network error.
+ if (
+ (request.responseTainting === 'opaque' || response.type === 'opaque') &&
+ crossOriginResourcePolicyCheck(
+ request.origin,
+ request.client,
+ request.destination,
+ actualResponse
+ ) === 'blocked'
+ ) {
+ return makeNetworkError('blocked')
+ }
+
+ // 8. If actualResponse’s status is a redirect status, then:
+ if (redirectStatusSet.has(actualResponse.status)) {
+ // 1. If actualResponse’s status is not 303, request’s body is not null,
+ // and the connection uses HTTP/2, then user agents may, and are even
+ // encouraged to, transmit an RST_STREAM frame.
+ // See, https://github.com/whatwg/fetch/issues/1288
+ if (request.redirect !== 'manual') {
+ fetchParams.controller.connection.destroy()
+ }
+
+ // 2. Switch on request’s redirect mode:
+ if (request.redirect === 'error') {
+ // Set response to a network error.
+ response = makeNetworkError('unexpected redirect')
+ } else if (request.redirect === 'manual') {
+ // Set response to an opaque-redirect filtered response whose internal
+ // response is actualResponse.
+ // NOTE(spec): On the web this would return an `opaqueredirect` response,
+ // but that doesn't make sense server side.
+ // See https://github.com/nodejs/undici/issues/1193.
+ response = actualResponse
+ } else if (request.redirect === 'follow') {
+ // Set response to the result of running HTTP-redirect fetch given
+ // fetchParams and response.
+ response = await httpRedirectFetch(fetchParams, response)
+ } else {
+ assert(false)
+ }
+ }
+
+ // 9. Set response’s timing info to timingInfo.
+ response.timingInfo = timingInfo
+
+ // 10. Return response.
+ return response
+}
+
+// https://fetch.spec.whatwg.org/#http-redirect-fetch
+function httpRedirectFetch (fetchParams, response) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let actualResponse be response, if response is not a filtered response,
+ // and response’s internal response otherwise.
+ const actualResponse = response.internalResponse
+ ? response.internalResponse
+ : response
+
+ // 3. Let locationURL be actualResponse’s location URL given request’s current
+ // URL’s fragment.
+ let locationURL
+
+ try {
+ locationURL = responseLocationURL(
+ actualResponse,
+ requestCurrentURL(request).hash
+ )
+
+ // 4. If locationURL is null, then return response.
+ if (locationURL == null) {
+ return response
+ }
+ } catch (err) {
+ // 5. If locationURL is failure, then return a network error.
+ return Promise.resolve(makeNetworkError(err))
+ }
+
+ // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network
+ // error.
+ if (!urlIsHttpHttpsScheme(locationURL)) {
+ return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))
+ }
+
+ // 7. If request’s redirect count is 20, then return a network error.
+ if (request.redirectCount === 20) {
+ return Promise.resolve(makeNetworkError('redirect count exceeded'))
+ }
+
+ // 8. Increase request’s redirect count by 1.
+ request.redirectCount += 1
+
+ // 9. If request’s mode is "cors", locationURL includes credentials, and
+ // request’s origin is not same origin with locationURL’s origin, then return
+ // a network error.
+ if (
+ request.mode === 'cors' &&
+ (locationURL.username || locationURL.password) &&
+ !sameOrigin(request, locationURL)
+ ) {
+ return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"'))
+ }
+
+ // 10. If request’s response tainting is "cors" and locationURL includes
+ // credentials, then return a network error.
+ if (
+ request.responseTainting === 'cors' &&
+ (locationURL.username || locationURL.password)
+ ) {
+ return Promise.resolve(makeNetworkError(
+ 'URL cannot contain credentials for request mode "cors"'
+ ))
+ }
+
+ // 11. If actualResponse’s status is not 303, request’s body is non-null,
+ // and request’s body’s source is null, then return a network error.
+ if (
+ actualResponse.status !== 303 &&
+ request.body != null &&
+ request.body.source == null
+ ) {
+ return Promise.resolve(makeNetworkError())
+ }
+
+ // 12. If one of the following is true
+ // - actualResponse’s status is 301 or 302 and request’s method is `POST`
+ // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`
+ if (
+ ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||
+ (actualResponse.status === 303 &&
+ !GET_OR_HEAD.includes(request.method))
+ ) {
+ // then:
+ // 1. Set request’s method to `GET` and request’s body to null.
+ request.method = 'GET'
+ request.body = null
+
+ // 2. For each headerName of request-body-header name, delete headerName from
+ // request’s header list.
+ for (const headerName of requestBodyHeader) {
+ request.headersList.delete(headerName)
+ }
+ }
+
+ // 13. If request’s current URL’s origin is not same origin with locationURL’s
+ // origin, then for each headerName of CORS non-wildcard request-header name,
+ // delete headerName from request’s header list.
+ if (!sameOrigin(requestCurrentURL(request), locationURL)) {
+ // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
+ request.headersList.delete('authorization')
+
+ // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
+ request.headersList.delete('cookie')
+ request.headersList.delete('host')
+ }
+
+ // 14. If request’s body is non-null, then set request’s body to the first return
+ // value of safely extracting request’s body’s source.
+ if (request.body != null) {
+ assert(request.body.source != null)
+ request.body = safelyExtractBody(request.body.source)[0]
+ }
+
+ // 15. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 16. Set timingInfo’s redirect end time and post-redirect start time to the
+ // coarsened shared current time given fetchParams’s cross-origin isolated
+ // capability.
+ timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =
+ coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)
+
+ // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s
+ // redirect start time to timingInfo’s start time.
+ if (timingInfo.redirectStartTime === 0) {
+ timingInfo.redirectStartTime = timingInfo.startTime
+ }
+
+ // 18. Append locationURL to request’s URL list.
+ request.urlList.push(locationURL)
+
+ // 19. Invoke set request’s referrer policy on redirect on request and
+ // actualResponse.
+ setRequestReferrerPolicyOnRedirect(request, actualResponse)
+
+ // 20. Return the result of running main fetch given fetchParams and true.
+ return mainFetch(fetchParams, true)
+}
+
+// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
+async function httpNetworkOrCacheFetch (
+ fetchParams,
+ isAuthenticationFetch = false,
+ isNewConnectionFetch = false
+) {
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let httpFetchParams be null.
+ let httpFetchParams = null
+
+ // 3. Let httpRequest be null.
+ let httpRequest = null
+
+ // 4. Let response be null.
+ let response = null
+
+ // 5. Let storedResponse be null.
+ // TODO: cache
+
+ // 6. Let httpCache be null.
+ const httpCache = null
+
+ // 7. Let the revalidatingFlag be unset.
+ const revalidatingFlag = false
+
+ // 8. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. If request’s window is "no-window" and request’s redirect mode is
+ // "error", then set httpFetchParams to fetchParams and httpRequest to
+ // request.
+ if (request.window === 'no-window' && request.redirect === 'error') {
+ httpFetchParams = fetchParams
+ httpRequest = request
+ } else {
+ // Otherwise:
+
+ // 1. Set httpRequest to a clone of request.
+ httpRequest = makeRequest(request)
+
+ // 2. Set httpFetchParams to a copy of fetchParams.
+ httpFetchParams = { ...fetchParams }
+
+ // 3. Set httpFetchParams’s request to httpRequest.
+ httpFetchParams.request = httpRequest
+ }
+
+  // 3. Let includeCredentials be true if request’s credentials mode is "include",
+  //    or "same-origin" while request’s response tainting is "basic"; otherwise false.
+ const includeCredentials =
+ request.credentials === 'include' ||
+ (request.credentials === 'same-origin' &&
+ request.responseTainting === 'basic')
+
+ // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s
+ // body is non-null; otherwise null.
+ const contentLength = httpRequest.body ? httpRequest.body.length : null
+
+ // 5. Let contentLengthHeaderValue be null.
+ let contentLengthHeaderValue = null
+
+ // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or
+ // `PUT`, then set contentLengthHeaderValue to `0`.
+ if (
+ httpRequest.body == null &&
+ ['POST', 'PUT'].includes(httpRequest.method)
+ ) {
+ contentLengthHeaderValue = '0'
+ }
+
+ // 7. If contentLength is non-null, then set contentLengthHeaderValue to
+ // contentLength, serialized and isomorphic encoded.
+ if (contentLength != null) {
+ contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)
+ }
+
+ // 8. If contentLengthHeaderValue is non-null, then append
+ // `Content-Length`/contentLengthHeaderValue to httpRequest’s header
+ // list.
+ if (contentLengthHeaderValue != null) {
+ httpRequest.headersList.append('content-length', contentLengthHeaderValue)
+ }
+
+ // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`,
+ // contentLengthHeaderValue) to httpRequest’s header list.
+
+ // 10. If contentLength is non-null and httpRequest’s keepalive is true,
+ // then:
+ if (contentLength != null && httpRequest.keepalive) {
+ // NOTE: keepalive is a noop outside of browser context.
+ }
+
+ // 11. If httpRequest’s referrer is a URL, then append
+ // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,
+ // to httpRequest’s header list.
+ if (httpRequest.referrer instanceof URL) {
+ httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))
+ }
+
+ // 12. Append a request `Origin` header for httpRequest.
+ appendRequestOriginHeader(httpRequest)
+
+ // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]
+ appendFetchMetadata(httpRequest)
+
+ // 14. If httpRequest’s header list does not contain `User-Agent`, then
+ // user agents should append `User-Agent`/default `User-Agent` value to
+ // httpRequest’s header list.
+ if (!httpRequest.headersList.contains('user-agent')) {
+ httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')
+ }
+
+ // 15. If httpRequest’s cache mode is "default" and httpRequest’s header
+ // list contains `If-Modified-Since`, `If-None-Match`,
+ // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set
+ // httpRequest’s cache mode to "no-store".
+ if (
+ httpRequest.cache === 'default' &&
+ (httpRequest.headersList.contains('if-modified-since') ||
+ httpRequest.headersList.contains('if-none-match') ||
+ httpRequest.headersList.contains('if-unmodified-since') ||
+ httpRequest.headersList.contains('if-match') ||
+ httpRequest.headersList.contains('if-range'))
+ ) {
+ httpRequest.cache = 'no-store'
+ }
+
+ // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent
+ // no-cache cache-control header modification flag is unset, and
+ // httpRequest’s header list does not contain `Cache-Control`, then append
+ // `Cache-Control`/`max-age=0` to httpRequest’s header list.
+ if (
+ httpRequest.cache === 'no-cache' &&
+ !httpRequest.preventNoCacheCacheControlHeaderModification &&
+ !httpRequest.headersList.contains('cache-control')
+ ) {
+ httpRequest.headersList.append('cache-control', 'max-age=0')
+ }
+
+ // 17. If httpRequest’s cache mode is "no-store" or "reload", then:
+ if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {
+ // 1. If httpRequest’s header list does not contain `Pragma`, then append
+ // `Pragma`/`no-cache` to httpRequest’s header list.
+ if (!httpRequest.headersList.contains('pragma')) {
+ httpRequest.headersList.append('pragma', 'no-cache')
+ }
+
+ // 2. If httpRequest’s header list does not contain `Cache-Control`,
+ // then append `Cache-Control`/`no-cache` to httpRequest’s header list.
+ if (!httpRequest.headersList.contains('cache-control')) {
+ httpRequest.headersList.append('cache-control', 'no-cache')
+ }
+ }
+
+ // 18. If httpRequest’s header list contains `Range`, then append
+ // `Accept-Encoding`/`identity` to httpRequest’s header list.
+ if (httpRequest.headersList.contains('range')) {
+ httpRequest.headersList.append('accept-encoding', 'identity')
+ }
+
+ // 19. Modify httpRequest’s header list per HTTP. Do not append a given
+ // header if httpRequest’s header list contains that header’s name.
+ // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
+ if (!httpRequest.headersList.contains('accept-encoding')) {
+ if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
+ httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
+ } else {
+ httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
+ }
+ }
+
+ httpRequest.headersList.delete('host')
+
+ // 20. If includeCredentials is true, then:
+ if (includeCredentials) {
+ // 1. If the user agent is not configured to block cookies for httpRequest
+ // (see section 7 of [COOKIES]), then:
+ // TODO: credentials
+ // 2. If httpRequest’s header list does not contain `Authorization`, then:
+ // TODO: credentials
+ }
+
+ // 21. If there’s a proxy-authentication entry, use it as appropriate.
+ // TODO: proxy-authentication
+
+ // 22. Set httpCache to the result of determining the HTTP cache
+ // partition, given httpRequest.
+ // TODO: cache
+
+ // 23. If httpCache is null, then set httpRequest’s cache mode to
+ // "no-store".
+ if (httpCache == null) {
+ httpRequest.cache = 'no-store'
+ }
+
+ // 24. If httpRequest’s cache mode is neither "no-store" nor "reload",
+ // then:
+  if (httpRequest.cache !== 'no-store' && httpRequest.cache !== 'reload') {
+ // TODO: cache
+ }
+
+ // 9. If aborted, then return the appropriate network error for fetchParams.
+ // TODO
+
+ // 10. If response is null, then:
+ if (response == null) {
+ // 1. If httpRequest’s cache mode is "only-if-cached", then return a
+ // network error.
+    if (httpRequest.cache === 'only-if-cached') {
+ return makeNetworkError('only if cached')
+ }
+
+ // 2. Let forwardResponse be the result of running HTTP-network fetch
+ // given httpFetchParams, includeCredentials, and isNewConnectionFetch.
+ const forwardResponse = await httpNetworkFetch(
+ httpFetchParams,
+ includeCredentials,
+ isNewConnectionFetch
+ )
+
+ // 3. If httpRequest’s method is unsafe and forwardResponse’s status is
+ // in the range 200 to 399, inclusive, invalidate appropriate stored
+ // responses in httpCache, as per the "Invalidation" chapter of HTTP
+ // Caching, and set storedResponse to null. [HTTP-CACHING]
+ if (
+ !safeMethodsSet.has(httpRequest.method) &&
+ forwardResponse.status >= 200 &&
+ forwardResponse.status <= 399
+ ) {
+ // TODO: cache
+ }
+
+ // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,
+ // then:
+ if (revalidatingFlag && forwardResponse.status === 304) {
+ // TODO: cache
+ }
+
+ // 5. If response is null, then:
+ if (response == null) {
+ // 1. Set response to forwardResponse.
+ response = forwardResponse
+
+ // 2. Store httpRequest and forwardResponse in httpCache, as per the
+ // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING]
+ // TODO: cache
+ }
+ }
+
+ // 11. Set response’s URL list to a clone of httpRequest’s URL list.
+ response.urlList = [...httpRequest.urlList]
+
+ // 12. If httpRequest’s header list contains `Range`, then set response’s
+ // range-requested flag.
+ if (httpRequest.headersList.contains('range')) {
+ response.rangeRequested = true
+ }
+
+ // 13. Set response’s request-includes-credentials to includeCredentials.
+ response.requestIncludesCredentials = includeCredentials
+
+ // 14. If response’s status is 401, httpRequest’s response tainting is not
+ // "cors", includeCredentials is true, and request’s window is an environment
+ // settings object, then:
+ // TODO
+
+ // 15. If response’s status is 407, then:
+ if (response.status === 407) {
+ // 1. If request’s window is "no-window", then return a network error.
+ if (request.window === 'no-window') {
+ return makeNetworkError()
+ }
+
+ // 2. ???
+
+ // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams)) {
+ return makeAppropriateNetworkError(fetchParams)
+ }
+
+ // 4. Prompt the end user as appropriate in request’s window and store
+ // the result as a proxy-authentication entry. [HTTP-AUTH]
+ // TODO: Invoke some kind of callback?
+
+ // 5. Set response to the result of running HTTP-network-or-cache fetch given
+ // fetchParams.
+ // TODO
+ return makeNetworkError('proxy authentication required')
+ }
+
+ // 16. If all of the following are true
+ if (
+ // response’s status is 421
+ response.status === 421 &&
+ // isNewConnectionFetch is false
+ !isNewConnectionFetch &&
+ // request’s body is null, or request’s body is non-null and request’s body’s source is non-null
+ (request.body == null || request.body.source != null)
+ ) {
+ // then:
+
+ // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+ if (isCancelled(fetchParams)) {
+ return makeAppropriateNetworkError(fetchParams)
+ }
+
+ // 2. Set response to the result of running HTTP-network-or-cache
+ // fetch given fetchParams, isAuthenticationFetch, and true.
+
+ // TODO (spec): The spec doesn't specify this but we need to cancel
+ // the active response before we can start a new one.
+ // https://github.com/whatwg/fetch/issues/1293
+ fetchParams.controller.connection.destroy()
+
+ response = await httpNetworkOrCacheFetch(
+ fetchParams,
+ isAuthenticationFetch,
+ true
+ )
+ }
+
+ // 17. If isAuthenticationFetch is true, then create an authentication entry
+ if (isAuthenticationFetch) {
+ // TODO
+ }
+
+ // 18. Return response.
+ return response
+}
+
+// https://fetch.spec.whatwg.org/#http-network-fetch
+async function httpNetworkFetch (
+ fetchParams,
+ includeCredentials = false,
+ forceNewConnection = false
+) {
+ assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)
+
+ fetchParams.controller.connection = {
+ abort: null,
+ destroyed: false,
+ destroy (err) {
+ if (!this.destroyed) {
+ this.destroyed = true
+ this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))
+ }
+ }
+ }
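+ // Note: `connection.abort` is wired up later in dispatch()'s onConnect
+ // callback; destroy() is what the 421 retry path and the abort/terminate
+ // handlers call to tear down the in-flight request, defaulting the reason
+ // to an "AbortError" DOMException when none is given.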
+
+ // 1. Let request be fetchParams’s request.
+ const request = fetchParams.request
+
+ // 2. Let response be null.
+ let response = null
+
+ // 3. Let timingInfo be fetchParams’s timing info.
+ const timingInfo = fetchParams.timingInfo
+
+ // 4. Let httpCache be the result of determining the HTTP cache partition,
+ // given request.
+ // TODO: cache
+ const httpCache = null
+
+ // 5. If httpCache is null, then set request’s cache mode to "no-store".
+ if (httpCache == null) {
+ request.cache = 'no-store'
+ }
+
+ // 6. Let networkPartitionKey be the result of determining the network
+ // partition key given request.
+ // TODO
+
+ // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise
+ // "no".
+ const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars
+
+ // 8. Switch on request’s mode:
+ if (request.mode === 'websocket') {
+ // Let connection be the result of obtaining a WebSocket connection,
+ // given request’s current URL.
+ // TODO
+ } else {
+ // Let connection be the result of obtaining a connection, given
+ // networkPartitionKey, request’s current URL’s origin,
+ // includeCredentials, and forceNewConnection.
+ // TODO
+ }
+
+ // 9. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. If connection is failure, then return a network error.
+
+ // 2. Set timingInfo’s final connection timing info to the result of
+ // calling clamp and coarsen connection timing info with connection’s
+ // timing info, timingInfo’s post-redirect start time, and fetchParams’s
+ // cross-origin isolated capability.
+
+ // 3. If connection is not an HTTP/2 connection, request’s body is non-null,
+ // and request’s body’s source is null, then append (`Transfer-Encoding`,
+ // `chunked`) to request’s header list.
+
+ // 4. Set timingInfo’s final network-request start time to the coarsened
+ // shared current time given fetchParams’s cross-origin isolated
+ // capability.
+
+ // 5. Set response to the result of making an HTTP request over connection
+ // using request with the following caveats:
+
+ // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]
+ // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]
+
+ // - If request’s body is non-null, and request’s body’s source is null,
+ // then the user agent may have a buffer of up to 64 kibibytes and store
+ // a part of request’s body in that buffer. If the user agent reads from
+ // request’s body beyond that buffer’s size and the user agent needs to
+ // resend request, then instead return a network error.
+
+ // - Set timingInfo’s final network-response start time to the coarsened
+ // shared current time given fetchParams’s cross-origin isolated capability,
+ // immediately after the user agent’s HTTP parser receives the first byte
+ // of the response (e.g., frame header bytes for HTTP/2 or response status
+ // line for HTTP/1.x).
+
+ // - Wait until all the headers are transmitted.
+
+ // - Any responses whose status is in the range 100 to 199, inclusive,
+ // and is not 101, are to be ignored, except for the purposes of setting
+ // timingInfo’s final network-response start time above.
+
+ // - If request’s header list contains `Transfer-Encoding`/`chunked` and
+ // response is transferred via HTTP/1.0 or older, then return a network
+ // error.
+
+ // - If the HTTP request results in a TLS client certificate dialog, then:
+
+ // 1. If request’s window is an environment settings object, make the
+ // dialog available in request’s window.
+
+ // 2. Otherwise, return a network error.
+
+ // To transmit request’s body body, run these steps:
+ let requestBody = null
+ // 1. If body is null and fetchParams’s process request end-of-body is
+ // non-null, then queue a fetch task given fetchParams’s process request
+ // end-of-body and fetchParams’s task destination.
+ if (request.body == null && fetchParams.processRequestEndOfBody) {
+ queueMicrotask(() => fetchParams.processRequestEndOfBody())
+ } else if (request.body != null) {
+ // 2. Otherwise, if body is non-null:
+
+ // 1. Let processBodyChunk given bytes be these steps:
+ const processBodyChunk = async function * (bytes) {
+ // 1. If the ongoing fetch is terminated, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. Run this step in parallel: transmit bytes.
+ yield bytes
+
+ // 3. If fetchParams’s process request body is non-null, then run
+ // fetchParams’s process request body given bytes’s length.
+ fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)
+ }
+
+ // 2. Let processEndOfBody be these steps:
+ const processEndOfBody = () => {
+ // 1. If fetchParams is canceled, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. If fetchParams’s process request end-of-body is non-null,
+ // then run fetchParams’s process request end-of-body.
+ if (fetchParams.processRequestEndOfBody) {
+ fetchParams.processRequestEndOfBody()
+ }
+ }
+
+ // 3. Let processBodyError given e be these steps:
+ const processBodyError = (e) => {
+ // 1. If fetchParams is canceled, then abort these steps.
+ if (isCancelled(fetchParams)) {
+ return
+ }
+
+ // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller.
+ if (e.name === 'AbortError') {
+ fetchParams.controller.abort()
+ } else {
+ fetchParams.controller.terminate(e)
+ }
+ }
+
+ // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody,
+ // processBodyError, and fetchParams’s task destination.
+ requestBody = (async function * () {
+ try {
+ for await (const bytes of request.body.stream) {
+ yield * processBodyChunk(bytes)
+ }
+ processEndOfBody()
+ } catch (err) {
+ processBodyError(err)
+ }
+ })()
+ }
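+ // Illustrative sketch (not executed): `requestBody` is an async iterable, so
+ // the dispatcher can pull chunks lazily and `request.body.stream` is only
+ // read as fast as the chunks are consumed, e.g.:
+ //
+ //   // hypothetical consumer, for illustration only
+ //   for await (const chunk of requestBody) {
+ //     sink.write(chunk)
+ //   }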
+
+ try {
+ // socket is only provided for websockets
+ const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })
+
+ if (socket) {
+ response = makeResponse({ status, statusText, headersList, socket })
+ } else {
+ const iterator = body[Symbol.asyncIterator]()
+ fetchParams.controller.next = () => iterator.next()
+
+ response = makeResponse({ status, statusText, headersList })
+ }
+ } catch (err) {
+ // 10. If aborted, then:
+ if (err.name === 'AbortError') {
+ // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+ fetchParams.controller.connection.destroy()
+
+ // 2. Return the appropriate network error for fetchParams.
+ return makeAppropriateNetworkError(fetchParams, err)
+ }
+
+ return makeNetworkError(err)
+ }
+
+ // 11. Let pullAlgorithm be an action that resumes the ongoing fetch
+ // if it is suspended.
+ const pullAlgorithm = () => {
+ fetchParams.controller.resume()
+ }
+
+ // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s
+ // controller with reason, given reason.
+ const cancelAlgorithm = (reason) => {
+ fetchParams.controller.abort(reason)
+ }
+
+ // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by
+ // the user agent.
+ // TODO
+
+ // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object
+ // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.
+ // TODO
+
+ // 15. Let stream be a new ReadableStream.
+ // 16. Set up stream with pullAlgorithm set to pullAlgorithm,
+ // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to
+ // highWaterMark, and sizeAlgorithm set to sizeAlgorithm.
+ if (!ReadableStream) {
+ ReadableStream = (__nccwpck_require__(5356).ReadableStream)
+ }
+
+ const stream = new ReadableStream(
+ {
+ async start (controller) {
+ fetchParams.controller.controller = controller
+ },
+ async pull (controller) {
+ await pullAlgorithm(controller)
+ },
+ async cancel (reason) {
+ await cancelAlgorithm(reason)
+ }
+ },
+ {
+ highWaterMark: 0,
+ size () {
+ return 1
+ }
+ }
+ )
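+ // Note: with `highWaterMark: 0` and a per-chunk size of 1, the stream only
+ // signals demand while a reader is waiting; pull() then calls
+ // fetchParams.controller.resume() below, and the resume loop stops again once
+ // desiredSize is no longer positive, which is how backpressure from the
+ // consumer reaches the underlying connection.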
+
+ // 17. Run these steps, but abort when the ongoing fetch is terminated:
+
+ // 1. Set response’s body to a new body whose stream is stream.
+ response.body = { stream }
+
+ // 2. If response is not a network error and request’s cache mode is
+ // not "no-store", then update response in httpCache for request.
+ // TODO
+
+ // 3. If includeCredentials is true and the user agent is not configured
+ // to block cookies for request (see section 7 of [COOKIES]), then run the
+ // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on
+ // the value of each header whose name is a byte-case-insensitive match for
+ // `Set-Cookie` in response’s header list, if any, and request’s current URL.
+ // TODO
+
+ // 18. If aborted, then:
+ // TODO
+
+ // 19. Run these steps in parallel:
+
+ // 1. Run these steps, but abort when fetchParams is canceled:
+ fetchParams.controller.on('terminated', onAborted)
+ fetchParams.controller.resume = async () => {
+ // 1. While true
+ while (true) {
+ // 1-3. See onData...
+
+ // 4. Set bytes to the result of handling content codings given
+ // codings and bytes.
+ let bytes
+ let isFailure
+ try {
+ const { done, value } = await fetchParams.controller.next()
+
+ if (isAborted(fetchParams)) {
+ break
+ }
+
+ bytes = done ? undefined : value
+ } catch (err) {
+ if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {
+ // zlib doesn't like empty streams.
+ bytes = undefined
+ } else {
+ bytes = err
+
+ // err may be propagated from the result of calling readablestream.cancel,
+ // which might not be an error. https://github.com/nodejs/undici/issues/2009
+ isFailure = true
+ }
+ }
+
+ if (bytes === undefined) {
+ // 2. Otherwise, if the bytes transmission for response’s message
+ // body is done normally and stream is readable, then close
+ // stream, finalize response for fetchParams and response, and
+ // abort these in-parallel steps.
+ readableStreamClose(fetchParams.controller.controller)
+
+ finalizeResponse(fetchParams, response)
+
+ return
+ }
+
+ // 5. Increase timingInfo’s decoded body size by bytes’s length.
+ timingInfo.decodedBodySize += bytes?.byteLength ?? 0
+
+ // 6. If bytes is failure, then terminate fetchParams’s controller.
+ if (isFailure) {
+ fetchParams.controller.terminate(bytes)
+ return
+ }
+
+ // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes
+ // into stream.
+ fetchParams.controller.controller.enqueue(new Uint8Array(bytes))
+
+ // 8. If stream is errored, then terminate the ongoing fetch.
+ if (isErrored(stream)) {
+ fetchParams.controller.terminate()
+ return
+ }
+
+ // 9. If stream doesn’t need more data ask the user agent to suspend
+ // the ongoing fetch.
+ if (!fetchParams.controller.controller.desiredSize) {
+ return
+ }
+ }
+ }
+
+ // 2. If aborted, then:
+ function onAborted (reason) {
+ // 2. If fetchParams is aborted, then:
+ if (isAborted(fetchParams)) {
+ // 1. Set response’s aborted flag.
+ response.aborted = true
+
+ // 2. If stream is readable, then error stream with the result of
+ // deserialize a serialized abort reason given fetchParams’s
+ // controller’s serialized abort reason and an
+ // implementation-defined realm.
+ if (isReadable(stream)) {
+ fetchParams.controller.controller.error(
+ fetchParams.controller.serializedAbortReason
+ )
+ }
+ } else {
+ // 3. Otherwise, if stream is readable, error stream with a TypeError.
+ if (isReadable(stream)) {
+ fetchParams.controller.controller.error(new TypeError('terminated', {
+ cause: isErrorLike(reason) ? reason : undefined
+ }))
+ }
+ }
+
+ // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+ // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
+ fetchParams.controller.connection.destroy()
+ }
+
+ // 20. Return response.
+ return response
+
+ async function dispatch ({ body }) {
+ const url = requestCurrentURL(request)
+ /** @type {import('../..').Agent} */
+ const agent = fetchParams.controller.dispatcher
+
+ return new Promise((resolve, reject) => agent.dispatch(
+ {
+ path: url.pathname + url.search,
+ origin: url.origin,
+ method: request.method,
+ body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,
+ headers: request.headersList.entries,
+ maxRedirections: 0,
+ upgrade: request.mode === 'websocket' ? 'websocket' : undefined
+ },
+ {
+ body: null,
+ abort: null,
+
+ onConnect (abort) {
+ // TODO (fix): Do we need connection here?
+ const { connection } = fetchParams.controller
+
+ if (connection.destroyed) {
+ abort(new DOMException('The operation was aborted.', 'AbortError'))
+ } else {
+ fetchParams.controller.on('terminated', abort)
+ this.abort = connection.abort = abort
+ }
+ },
+
+ onHeaders (status, headersList, resume, statusText) {
+ if (status < 200) {
+ return
+ }
+
+ let codings = []
+ let location = ''
+
+ const headers = new Headers()
+
+ // For H2, the headers are a plain JS object
+ // We distinguish between them and iterate accordingly
+ if (Array.isArray(headersList)) {
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString('latin1')
+ const val = headersList[n + 1].toString('latin1')
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim())
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
+
+ headers[kHeadersList].append(key, val)
+ }
+ } else {
+ const keys = Object.keys(headersList)
+ for (const key of keys) {
+ const val = headersList[key]
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
+
+ headers[kHeadersList].append(key, val)
+ }
+ }
+
+ this.body = new Readable({ read: resume })
+
+ const decoders = []
+
+ const willFollow = request.redirect === 'follow' &&
+ location &&
+ redirectStatusSet.has(status)
+
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+ if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
+ for (const coding of codings) {
+ // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
+ if (coding === 'x-gzip' || coding === 'gzip') {
+ decoders.push(zlib.createGunzip({
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ flush: zlib.constants.Z_SYNC_FLUSH,
+ finishFlush: zlib.constants.Z_SYNC_FLUSH
+ }))
+ } else if (coding === 'deflate') {
+ decoders.push(zlib.createInflate())
+ } else if (coding === 'br') {
+ decoders.push(zlib.createBrotliDecompress())
+ } else {
+ decoders.length = 0
+ break
+ }
+ }
+ }
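+ // Note: if any unsupported content-coding is seen, the decoder list is
+ // cleared and the body is passed through undecoded. A roughly equivalent
+ // standalone chain (illustrative only, `rawBody` is a hypothetical Readable
+ // of the encoded bytes) would be:
+ //
+ //   const decoded = pipeline(rawBody, zlib.createGunzip(), () => {})
+ //   // `decoded` is the last stream in the chain and emits the decoded bytes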
+
+ resolve({
+ status,
+ statusText,
+ headersList: headers[kHeadersList],
+ body: decoders.length
+ ? pipeline(this.body, ...decoders, () => { })
+ : this.body.on('error', () => {})
+ })
+
+ return true
+ },
+
+ onData (chunk) {
+ if (fetchParams.controller.dump) {
+ return
+ }
+
+ // 1. If one or more bytes have been transmitted from response’s
+ // message body, then:
+
+ // 1. Let bytes be the transmitted bytes.
+ const bytes = chunk
+
+ // 2. Let codings be the result of extracting header list values
+ // given `Content-Encoding` and response’s header list.
+ // See pullAlgorithm.
+
+ // 3. Increase timingInfo’s encoded body size by bytes’s length.
+ timingInfo.encodedBodySize += bytes.byteLength
+
+ // 4. See pullAlgorithm...
+
+ return this.body.push(bytes)
+ },
+
+ onComplete () {
+ if (this.abort) {
+ fetchParams.controller.off('terminated', this.abort)
+ }
+
+ fetchParams.controller.ended = true
+
+ this.body.push(null)
+ },
+
+ onError (error) {
+ if (this.abort) {
+ fetchParams.controller.off('terminated', this.abort)
+ }
+
+ this.body?.destroy(error)
+
+ fetchParams.controller.terminate(error)
+
+ reject(error)
+ },
+
+ onUpgrade (status, headersList, socket) {
+ if (status !== 101) {
+ return
+ }
+
+ const headers = new Headers()
+
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString('latin1')
+ const val = headersList[n + 1].toString('latin1')
+
+ headers[kHeadersList].append(key, val)
+ }
+
+ resolve({
+ status,
+ statusText: STATUS_CODES[status],
+ headersList: headers[kHeadersList],
+ socket
+ })
+
+ return true
+ }
+ }
+ ))
+ }
+}
+
+module.exports = {
+ fetch,
+ Fetch,
+ fetching,
+ finalizeAndReportTiming
+}
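+// Illustrative usage of the exported WHATWG-style fetch (URL hypothetical):
+//
+//   const res = await fetch('https://example.com/data.json', {
+//     headers: { accept: 'application/json' }
+//   })
+//   const data = res.ok ? await res.json() : null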
+
+
+/***/ }),
+
+/***/ 8359:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/* globals AbortController */
+
+
+
+const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1472)
+const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554)
+const { FinalizationRegistry } = __nccwpck_require__(6436)()
+const util = __nccwpck_require__(3983)
+const {
+ isValidHTTPToken,
+ sameOrigin,
+ normalizeMethod,
+ makePolicyContainer,
+ normalizeMethodRecord
+} = __nccwpck_require__(2538)
+const {
+ forbiddenMethodsSet,
+ corsSafeListedMethodsSet,
+ referrerPolicy,
+ requestRedirect,
+ requestMode,
+ requestCredentials,
+ requestCache,
+ requestDuplex
+} = __nccwpck_require__(1037)
+const { kEnumerableProperty } = util
+const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861)
+const { webidl } = __nccwpck_require__(1744)
+const { getGlobalOrigin } = __nccwpck_require__(1246)
+const { URLSerializer } = __nccwpck_require__(685)
+const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
+const assert = __nccwpck_require__(9491)
+const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361)
+
+let TransformStream = globalThis.TransformStream
+
+const kAbortController = Symbol('abortController')
+
+const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
+ signal.removeEventListener('abort', abort)
+})
+
+// https://fetch.spec.whatwg.org/#request-class
+class Request {
+ // https://fetch.spec.whatwg.org/#dom-request
+ constructor (input, init = {}) {
+ if (input === kConstruct) {
+ return
+ }
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })
+
+ input = webidl.converters.RequestInfo(input)
+ init = webidl.converters.RequestInit(init)
+
+ // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
+ this[kRealm] = {
+ settingsObject: {
+ baseUrl: getGlobalOrigin(),
+ get origin () {
+ return this.baseUrl?.origin
+ },
+ policyContainer: makePolicyContainer()
+ }
+ }
+
+ // 1. Let request be null.
+ let request = null
+
+ // 2. Let fallbackMode be null.
+ let fallbackMode = null
+
+ // 3. Let baseURL be this’s relevant settings object’s API base URL.
+ const baseUrl = this[kRealm].settingsObject.baseUrl
+
+ // 4. Let signal be null.
+ let signal = null
+
+ // 5. If input is a string, then:
+ if (typeof input === 'string') {
+ // 1. Let parsedURL be the result of parsing input with baseURL.
+ // 2. If parsedURL is failure, then throw a TypeError.
+ let parsedURL
+ try {
+ parsedURL = new URL(input, baseUrl)
+ } catch (err) {
+ throw new TypeError('Failed to parse URL from ' + input, { cause: err })
+ }
+
+ // 3. If parsedURL includes credentials, then throw a TypeError.
+ if (parsedURL.username || parsedURL.password) {
+ throw new TypeError(
+ 'Request cannot be constructed from a URL that includes credentials: ' +
+ input
+ )
+ }
+
+ // 4. Set request to a new request whose URL is parsedURL.
+ request = makeRequest({ urlList: [parsedURL] })
+
+ // 5. Set fallbackMode to "cors".
+ fallbackMode = 'cors'
+ } else {
+ // 6. Otherwise:
+
+ // 7. Assert: input is a Request object.
+ assert(input instanceof Request)
+
+ // 8. Set request to input’s request.
+ request = input[kState]
+
+ // 9. Set signal to input’s signal.
+ signal = input[kSignal]
+ }
+
+ // 7. Let origin be this’s relevant settings object’s origin.
+ const origin = this[kRealm].settingsObject.origin
+
+ // 8. Let window be "client".
+ let window = 'client'
+
+ // 9. If request’s window is an environment settings object and its origin
+ // is same origin with origin, then set window to request’s window.
+ if (
+ request.window?.constructor?.name === 'EnvironmentSettingsObject' &&
+ sameOrigin(request.window, origin)
+ ) {
+ window = request.window
+ }
+
+ // 10. If init["window"] exists and is non-null, then throw a TypeError.
+ if (init.window != null) {
+ throw new TypeError(`'window' option '${window}' must be null`)
+ }
+
+ // 11. If init["window"] exists, then set window to "no-window".
+ if ('window' in init) {
+ window = 'no-window'
+ }
+
+ // 12. Set request to a new request with the following properties:
+ request = makeRequest({
+ // URL request’s URL.
+ // undici implementation note: this is set as the first item in request's urlList in makeRequest
+ // method request’s method.
+ method: request.method,
+ // header list A copy of request’s header list.
+ // undici implementation note: headersList is cloned in makeRequest
+ headersList: request.headersList,
+ // unsafe-request flag Set.
+ unsafeRequest: request.unsafeRequest,
+ // client This’s relevant settings object.
+ client: this[kRealm].settingsObject,
+ // window window.
+ window,
+ // priority request’s priority.
+ priority: request.priority,
+ // origin request’s origin. The propagation of the origin is only significant for navigation requests
+ // being handled by a service worker. In this scenario a request can have an origin that is different
+ // from the current client.
+ origin: request.origin,
+ // referrer request’s referrer.
+ referrer: request.referrer,
+ // referrer policy request’s referrer policy.
+ referrerPolicy: request.referrerPolicy,
+ // mode request’s mode.
+ mode: request.mode,
+ // credentials mode request’s credentials mode.
+ credentials: request.credentials,
+ // cache mode request’s cache mode.
+ cache: request.cache,
+ // redirect mode request’s redirect mode.
+ redirect: request.redirect,
+ // integrity metadata request’s integrity metadata.
+ integrity: request.integrity,
+ // keepalive request’s keepalive.
+ keepalive: request.keepalive,
+ // reload-navigation flag request’s reload-navigation flag.
+ reloadNavigation: request.reloadNavigation,
+ // history-navigation flag request’s history-navigation flag.
+ historyNavigation: request.historyNavigation,
+ // URL list A clone of request’s URL list.
+ urlList: [...request.urlList]
+ })
+
+ const initHasKey = Object.keys(init).length !== 0
+
+ // 13. If init is not empty, then:
+ if (initHasKey) {
+ // 1. If request’s mode is "navigate", then set it to "same-origin".
+ if (request.mode === 'navigate') {
+ request.mode = 'same-origin'
+ }
+
+ // 2. Unset request’s reload-navigation flag.
+ request.reloadNavigation = false
+
+ // 3. Unset request’s history-navigation flag.
+ request.historyNavigation = false
+
+ // 4. Set request’s origin to "client".
+ request.origin = 'client'
+
+ // 5. Set request’s referrer to "client"
+ request.referrer = 'client'
+
+ // 6. Set request’s referrer policy to the empty string.
+ request.referrerPolicy = ''
+
+ // 7. Set request’s URL to request’s current URL.
+ request.url = request.urlList[request.urlList.length - 1]
+
+ // 8. Set request’s URL list to « request’s URL ».
+ request.urlList = [request.url]
+ }
+
+ // 14. If init["referrer"] exists, then:
+ if (init.referrer !== undefined) {
+ // 1. Let referrer be init["referrer"].
+ const referrer = init.referrer
+
+ // 2. If referrer is the empty string, then set request’s referrer to "no-referrer".
+ if (referrer === '') {
+ request.referrer = 'no-referrer'
+ } else {
+ // 1. Let parsedReferrer be the result of parsing referrer with
+ // baseURL.
+ // 2. If parsedReferrer is failure, then throw a TypeError.
+ let parsedReferrer
+ try {
+ parsedReferrer = new URL(referrer, baseUrl)
+ } catch (err) {
+ throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err })
+ }
+
+ // 3. If one of the following is true
+ // - parsedReferrer’s scheme is "about" and path is the string "client"
+ // - parsedReferrer’s origin is not same origin with origin
+ // then set request’s referrer to "client".
+ if (
+ (parsedReferrer.protocol === 'about:' && parsedReferrer.pathname === 'client') ||
+ (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))
+ ) {
+ request.referrer = 'client'
+ } else {
+ // 4. Otherwise, set request’s referrer to parsedReferrer.
+ request.referrer = parsedReferrer
+ }
+ }
+ }
+
+ // 15. If init["referrerPolicy"] exists, then set request’s referrer policy
+ // to it.
+ if (init.referrerPolicy !== undefined) {
+ request.referrerPolicy = init.referrerPolicy
+ }
+
+ // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise.
+ let mode
+ if (init.mode !== undefined) {
+ mode = init.mode
+ } else {
+ mode = fallbackMode
+ }
+
+ // 17. If mode is "navigate", then throw a TypeError.
+ if (mode === 'navigate') {
+ throw webidl.errors.exception({
+ header: 'Request constructor',
+ message: 'invalid request mode navigate.'
+ })
+ }
+
+ // 18. If mode is non-null, set request’s mode to mode.
+ if (mode != null) {
+ request.mode = mode
+ }
+
+ // 19. If init["credentials"] exists, then set request’s credentials mode
+ // to it.
+ if (init.credentials !== undefined) {
+ request.credentials = init.credentials
+ }
+
+ // 18. If init["cache"] exists, then set request’s cache mode to it.
+ if (init.cache !== undefined) {
+ request.cache = init.cache
+ }
+
+ // 21. If request’s cache mode is "only-if-cached" and request’s mode is
+ // not "same-origin", then throw a TypeError.
+ if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
+ throw new TypeError(
+ "'only-if-cached' can be set only with 'same-origin' mode"
+ )
+ }
+
+ // 22. If init["redirect"] exists, then set request’s redirect mode to it.
+ if (init.redirect !== undefined) {
+ request.redirect = init.redirect
+ }
+
+ // 23. If init["integrity"] exists, then set request’s integrity metadata to it.
+ if (init.integrity != null) {
+ request.integrity = String(init.integrity)
+ }
+
+ // 24. If init["keepalive"] exists, then set request’s keepalive to it.
+ if (init.keepalive !== undefined) {
+ request.keepalive = Boolean(init.keepalive)
+ }
+
+ // 25. If init["method"] exists, then:
+ if (init.method !== undefined) {
+ // 1. Let method be init["method"].
+ let method = init.method
+
+ // 2. If method is not a method or method is a forbidden method, then
+ // throw a TypeError.
+ if (!isValidHTTPToken(method)) {
+ throw new TypeError(`'${method}' is not a valid HTTP method.`)
+ }
+
+ if (forbiddenMethodsSet.has(method.toUpperCase())) {
+ throw new TypeError(`'${method}' HTTP method is unsupported.`)
+ }
+
+ // 3. Normalize method.
+ method = normalizeMethodRecord[method] ?? normalizeMethod(method)
+
+ // 4. Set request’s method to method.
+ request.method = method
+ }
+
+ // 26. If init["signal"] exists, then set signal to it.
+ if (init.signal !== undefined) {
+ signal = init.signal
+ }
+
+ // 27. Set this’s request to request.
+ this[kState] = request
+
+ // 28. Set this’s signal to a new AbortSignal object with this’s relevant
+ // Realm.
+ // TODO: could this be simplified with AbortSignal.any
+ // (https://dom.spec.whatwg.org/#dom-abortsignal-any)
+ const ac = new AbortController()
+ this[kSignal] = ac.signal
+ this[kSignal][kRealm] = this[kRealm]
+
+ // 29. If signal is not null, then make this’s signal follow signal.
+ if (signal != null) {
+ if (
+ !signal ||
+ typeof signal.aborted !== 'boolean' ||
+ typeof signal.addEventListener !== 'function'
+ ) {
+ throw new TypeError(
+ "Failed to construct 'Request': member signal is not of type AbortSignal."
+ )
+ }
+
+ if (signal.aborted) {
+ ac.abort(signal.reason)
+ } else {
+ // Keep a strong ref to ac while request object
+ // is alive. This is needed to prevent AbortController
+ // from being prematurely garbage collected.
+ // See, https://github.com/nodejs/undici/issues/1926.
+ this[kAbortController] = ac
+
+ const acRef = new WeakRef(ac)
+ const abort = function () {
+ const ac = acRef.deref()
+ if (ac !== undefined) {
+ ac.abort(this.reason)
+ }
+ }
+
+ // Third-party AbortControllers may not work with these.
+ // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
+ try {
+ // If the max amount of listeners is equal to the default, increase it
+ // This is only available in node >= v19.9.0
+ if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
+ setMaxListeners(100, signal)
+ } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
+ setMaxListeners(100, signal)
+ }
+ } catch {}
+
+ util.addAbortListener(signal, abort)
+ requestFinalizer.register(ac, { signal, abort })
+ }
+ }
+
+ // 30. Set this’s headers to a new Headers object with this’s relevant
+ // Realm, whose header list is request’s header list and guard is
+ // "request".
+ this[kHeaders] = new Headers(kConstruct)
+ this[kHeaders][kHeadersList] = request.headersList
+ this[kHeaders][kGuard] = 'request'
+ this[kHeaders][kRealm] = this[kRealm]
+
+ // 31. If this’s request’s mode is "no-cors", then:
+ if (mode === 'no-cors') {
+ // 1. If this’s request’s method is not a CORS-safelisted method,
+ // then throw a TypeError.
+ if (!corsSafeListedMethodsSet.has(request.method)) {
+ throw new TypeError(
+ `'${request.method}' is unsupported in no-cors mode.`
+ )
+ }
+
+ // 2. Set this’s headers’s guard to "request-no-cors".
+ this[kHeaders][kGuard] = 'request-no-cors'
+ }
+
+ // 32. If init is not empty, then:
+ if (initHasKey) {
+ /** @type {HeadersList} */
+ const headersList = this[kHeaders][kHeadersList]
+ // 1. Let headers be a copy of this’s headers and its associated header
+ // list.
+ // 2. If init["headers"] exists, then set headers to init["headers"].
+ const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)
+
+ // 3. Empty this’s headers’s header list.
+ headersList.clear()
+
+ // 4. If headers is a Headers object, then for each header in its header
+ // list, append header’s name/header’s value to this’s headers.
+ if (headers instanceof HeadersList) {
+ for (const [key, val] of headers) {
+ headersList.append(key, val)
+ }
+ // Note: Copy the `set-cookie` meta-data.
+ headersList.cookies = headers.cookies
+ } else {
+ // 5. Otherwise, fill this’s headers with headers.
+ fillHeaders(this[kHeaders], headers)
+ }
+ }
+
+ // 33. Let inputBody be input’s request’s body if input is a Request
+ // object; otherwise null.
+ const inputBody = input instanceof Request ? input[kState].body : null
+
+ // 34. If either init["body"] exists and is non-null or inputBody is
+ // non-null, and request’s method is `GET` or `HEAD`, then throw a
+ // TypeError.
+ if (
+ (init.body != null || inputBody != null) &&
+ (request.method === 'GET' || request.method === 'HEAD')
+ ) {
+ throw new TypeError('Request with GET/HEAD method cannot have body.')
+ }
+
+ // 35. Let initBody be null.
+ let initBody = null
+
+ // 36. If init["body"] exists and is non-null, then:
+ if (init.body != null) {
+ // 1. Let Content-Type be null.
+ // 2. Set initBody and Content-Type to the result of extracting
+ // init["body"], with keepalive set to request’s keepalive.
+ const [extractedBody, contentType] = extractBody(
+ init.body,
+ request.keepalive
+ )
+ initBody = extractedBody
+
+ // 3. If Content-Type is non-null and this’s headers’s header list does
+ // not contain `Content-Type`, then append `Content-Type`/Content-Type to
+ // this’s headers.
+ if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {
+ this[kHeaders].append('content-type', contentType)
+ }
+ }
+
+ // 37. Let inputOrInitBody be initBody if it is non-null; otherwise
+ // inputBody.
+ const inputOrInitBody = initBody ?? inputBody
+
+ // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is
+ // null, then:
+ if (inputOrInitBody != null && inputOrInitBody.source == null) {
+ // 1. If initBody is non-null and init["duplex"] does not exist,
+ // then throw a TypeError.
+ if (initBody != null && init.duplex == null) {
+ throw new TypeError('RequestInit: duplex option is required when sending a body.')
+ }
+
+ // 2. If this’s request’s mode is neither "same-origin" nor "cors",
+ // then throw a TypeError.
+ if (request.mode !== 'same-origin' && request.mode !== 'cors') {
+ throw new TypeError(
+ 'If request is made from ReadableStream, mode should be "same-origin" or "cors"'
+ )
+ }
+
+ // 3. Set this’s request’s use-CORS-preflight flag.
+ request.useCORSPreflightFlag = true
+ }
+
+ // 39. Let finalBody be inputOrInitBody.
+ let finalBody = inputOrInitBody
+
+ // 40. If initBody is null and inputBody is non-null, then:
+ if (initBody == null && inputBody != null) {
+ // 1. If input is unusable, then throw a TypeError.
+ if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {
+ throw new TypeError(
+ 'Cannot construct a Request with a Request object that has already been used.'
+ )
+ }
+
+ // 2. Set finalBody to the result of creating a proxy for inputBody.
+ if (!TransformStream) {
+ TransformStream = (__nccwpck_require__(5356).TransformStream)
+ }
+
+ // https://streams.spec.whatwg.org/#readablestream-create-a-proxy
+ const identityTransform = new TransformStream()
+ inputBody.stream.pipeThrough(identityTransform)
+ finalBody = {
+ source: inputBody.source,
+ length: inputBody.length,
+ stream: identityTransform.readable
+ }
+ }
+
+ // 41. Set this’s request’s body to finalBody.
+ this[kState].body = finalBody
+ }
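+ // Illustrative constructor usage (values hypothetical): a string input is
+ // parsed against the global origin and the init members are validated by
+ // the steps above.
+ //
+ //   const req = new Request('https://example.com/items', {
+ //     method: 'POST',
+ //     headers: { 'content-type': 'application/json' },
+ //     body: JSON.stringify({ name: 'x' })
+ //   })
+ //
+ // A GET/HEAD request with a body, or a ReadableStream body without
+ // `duplex: 'half'`, throws a TypeError (steps 34 and 38 above).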
+
+ // Returns request’s HTTP method, which is "GET" by default.
+ get method () {
+ webidl.brandCheck(this, Request)
+
+ // The method getter steps are to return this’s request’s method.
+ return this[kState].method
+ }
+
+ // Returns the URL of request as a string.
+ get url () {
+ webidl.brandCheck(this, Request)
+
+ // The url getter steps are to return this’s request’s URL, serialized.
+ return URLSerializer(this[kState].url)
+ }
+
+ // Returns a Headers object consisting of the headers associated with request.
+ // Note that headers added in the network layer by the user agent will not
+ // be accounted for in this object, e.g., the "Host" header.
+ get headers () {
+ webidl.brandCheck(this, Request)
+
+ // The headers getter steps are to return this’s headers.
+ return this[kHeaders]
+ }
+
+ // Returns the kind of resource requested by request, e.g., "document"
+ // or "script".
+ get destination () {
+ webidl.brandCheck(this, Request)
+
+ // The destination getter steps are to return this’s request’s destination.
+ return this[kState].destination
+ }
+
+ // Returns the referrer of request. Its value can be a same-origin URL if
+ // explicitly set in init, the empty string to indicate no referrer, and
+ // "about:client" when defaulting to the global’s default. This is used
+ // during fetching to determine the value of the `Referer` header of the
+ // request being made.
+ get referrer () {
+ webidl.brandCheck(this, Request)
+
+ // 1. If this’s request’s referrer is "no-referrer", then return the
+ // empty string.
+ if (this[kState].referrer === 'no-referrer') {
+ return ''
+ }
+
+ // 2. If this’s request’s referrer is "client", then return
+ // "about:client".
+ if (this[kState].referrer === 'client') {
+ return 'about:client'
+ }
+
+ // Return this’s request’s referrer, serialized.
+ return this[kState].referrer.toString()
+ }
+
+ // Returns the referrer policy associated with request.
+ // This is used during fetching to compute the value of the request’s
+ // referrer.
+ get referrerPolicy () {
+ webidl.brandCheck(this, Request)
+
+ // The referrerPolicy getter steps are to return this’s request’s referrer policy.
+ return this[kState].referrerPolicy
+ }
+
+ // Returns the mode associated with request, which is a string indicating
+ // whether the request will use CORS, or will be restricted to same-origin
+ // URLs.
+ get mode () {
+ webidl.brandCheck(this, Request)
+
+ // The mode getter steps are to return this’s request’s mode.
+ return this[kState].mode
+ }
+
+ // Returns the credentials mode associated with request,
+ // which is a string indicating whether credentials will be sent with the
+ // request always, never, or only when sent to a same-origin URL.
+ get credentials () {
+ webidl.brandCheck(this, Request)
+
+ // The credentials getter steps are to return this’s request’s credentials mode.
+ return this[kState].credentials
+ }
+
+ // Returns the cache mode associated with request,
+ // which is a string indicating how the request will
+ // interact with the browser’s cache when fetching.
+ get cache () {
+ webidl.brandCheck(this, Request)
+
+ // The cache getter steps are to return this’s request’s cache mode.
+ return this[kState].cache
+ }
+
+ // Returns the redirect mode associated with request,
+ // which is a string indicating how redirects for the
+ // request will be handled during fetching. A request
+ // will follow redirects by default.
+ get redirect () {
+ webidl.brandCheck(this, Request)
+
+ // The redirect getter steps are to return this’s request’s redirect mode.
+ return this[kState].redirect
+ }
+
+ // Returns request’s subresource integrity metadata, which is a
+ // cryptographic hash of the resource being fetched. Its value
+ // consists of multiple hashes separated by whitespace. [SRI]
+ get integrity () {
+ webidl.brandCheck(this, Request)
+
+ // The integrity getter steps are to return this’s request’s integrity
+ // metadata.
+ return this[kState].integrity
+ }
+
+ // Returns a boolean indicating whether or not request can outlive the
+ // global in which it was created.
+ get keepalive () {
+ webidl.brandCheck(this, Request)
+
+ // The keepalive getter steps are to return this’s request’s keepalive.
+ return this[kState].keepalive
+ }
+
+ // Returns a boolean indicating whether or not request is for a reload
+ // navigation.
+ get isReloadNavigation () {
+ webidl.brandCheck(this, Request)
+
+ // The isReloadNavigation getter steps are to return true if this’s
+ // request’s reload-navigation flag is set; otherwise false.
+ return this[kState].reloadNavigation
+ }
+
+ // Returns a boolean indicating whether or not request is for a history
+ // navigation (a.k.a. back-forward navigation).
+ get isHistoryNavigation () {
+ webidl.brandCheck(this, Request)
+
+ // The isHistoryNavigation getter steps are to return true if this’s request’s
+ // history-navigation flag is set; otherwise false.
+ return this[kState].historyNavigation
+ }
+
+ // Returns the signal associated with request, which is an AbortSignal
+ // object indicating whether or not request has been aborted, and its
+ // abort event handler.
+ get signal () {
+ webidl.brandCheck(this, Request)
+
+ // The signal getter steps are to return this’s signal.
+ return this[kSignal]
+ }
+
+ get body () {
+ webidl.brandCheck(this, Request)
+
+ return this[kState].body ? this[kState].body.stream : null
+ }
+
+ get bodyUsed () {
+ webidl.brandCheck(this, Request)
+
+ return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+ }
+
+ get duplex () {
+ webidl.brandCheck(this, Request)
+
+ return 'half'
+ }
+
+ // Returns a clone of request.
+ clone () {
+ webidl.brandCheck(this, Request)
+
+ // 1. If this is unusable, then throw a TypeError.
+ if (this.bodyUsed || this.body?.locked) {
+ throw new TypeError('unusable')
+ }
+
+ // 2. Let clonedRequest be the result of cloning this’s request.
+ const clonedRequest = cloneRequest(this[kState])
+
+ // 3. Let clonedRequestObject be the result of creating a Request object,
+ // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.
+ const clonedRequestObject = new Request(kConstruct)
+ clonedRequestObject[kState] = clonedRequest
+ clonedRequestObject[kRealm] = this[kRealm]
+ clonedRequestObject[kHeaders] = new Headers(kConstruct)
+ clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
+ clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+ clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+ // 4. Make clonedRequestObject’s signal follow this’s signal.
+ const ac = new AbortController()
+ if (this.signal.aborted) {
+ ac.abort(this.signal.reason)
+ } else {
+ util.addAbortListener(
+ this.signal,
+ () => {
+ ac.abort(this.signal.reason)
+ }
+ )
+ }
+ clonedRequestObject[kSignal] = ac.signal
+
+ // 5. Return clonedRequestObject.
+ return clonedRequestObject
+ }
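+ // Illustrative clone usage (values hypothetical): cloning is only allowed
+ // while the body is unused and unlocked, and the clone gets its own
+ // AbortSignal that follows the original's.
+ //
+ //   const a = new Request('https://example.com', { method: 'PUT', body: 'x' })
+ //   const b = a.clone()
+ //   // after `await a.text()`, a.clone() would throw a TypeError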
+}
+
+mixinBody(Request)
+
+function makeRequest (init) {
+ // https://fetch.spec.whatwg.org/#requests
+ const request = {
+ method: 'GET',
+ localURLsOnly: false,
+ unsafeRequest: false,
+ body: null,
+ client: null,
+ reservedClient: null,
+ replacesClientId: '',
+ window: 'client',
+ keepalive: false,
+ serviceWorkers: 'all',
+ initiator: '',
+ destination: '',
+ priority: null,
+ origin: 'client',
+ policyContainer: 'client',
+ referrer: 'client',
+ referrerPolicy: '',
+ mode: 'no-cors',
+ useCORSPreflightFlag: false,
+ credentials: 'same-origin',
+ useCredentials: false,
+ cache: 'default',
+ redirect: 'follow',
+ integrity: '',
+ cryptoGraphicsNonceMetadata: '',
+ parserMetadata: '',
+ reloadNavigation: false,
+ historyNavigation: false,
+ userActivation: false,
+ taintedOrigin: false,
+ redirectCount: 0,
+ responseTainting: 'basic',
+ preventNoCacheCacheControlHeaderModification: false,
+ done: false,
+ timingAllowFailed: false,
+ ...init,
+ headersList: init.headersList
+ ? new HeadersList(init.headersList)
+ : new HeadersList()
+ }
+ request.url = request.urlList[0]
+ return request
+}
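+// Note: makeRequest() fills in the spec defaults for an inner request (GET,
+// mode "no-cors", credentials "same-origin", cache "default", redirect
+// "follow", ...) and always gives the request its own HeadersList copy, so
+// callers can mutate headers without affecting the source list.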
+
+// https://fetch.spec.whatwg.org/#concept-request-clone
+function cloneRequest (request) {
+ // To clone a request request, run these steps:
+
+ // 1. Let newRequest be a copy of request, except for its body.
+ const newRequest = makeRequest({ ...request, body: null })
+
+ // 2. If request’s body is non-null, set newRequest’s body to the
+ // result of cloning request’s body.
+ if (request.body != null) {
+ newRequest.body = cloneBody(request.body)
+ }
+
+ // 3. Return newRequest.
+ return newRequest
+}
+
+Object.defineProperties(Request.prototype, {
+ method: kEnumerableProperty,
+ url: kEnumerableProperty,
+ headers: kEnumerableProperty,
+ redirect: kEnumerableProperty,
+ clone: kEnumerableProperty,
+ signal: kEnumerableProperty,
+ duplex: kEnumerableProperty,
+ destination: kEnumerableProperty,
+ body: kEnumerableProperty,
+ bodyUsed: kEnumerableProperty,
+ isHistoryNavigation: kEnumerableProperty,
+ isReloadNavigation: kEnumerableProperty,
+ keepalive: kEnumerableProperty,
+ integrity: kEnumerableProperty,
+ cache: kEnumerableProperty,
+ credentials: kEnumerableProperty,
+ attribute: kEnumerableProperty,
+ referrerPolicy: kEnumerableProperty,
+ referrer: kEnumerableProperty,
+ mode: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'Request',
+ configurable: true
+ }
+})
+
+webidl.converters.Request = webidl.interfaceConverter(
+ Request
+)
+
+// https://fetch.spec.whatwg.org/#requestinfo
+webidl.converters.RequestInfo = function (V) {
+ if (typeof V === 'string') {
+ return webidl.converters.USVString(V)
+ }
+
+ if (V instanceof Request) {
+ return webidl.converters.Request(V)
+ }
+
+ return webidl.converters.USVString(V)
+}
+
+webidl.converters.AbortSignal = webidl.interfaceConverter(
+ AbortSignal
+)
+
+// https://fetch.spec.whatwg.org/#requestinit
+webidl.converters.RequestInit = webidl.dictionaryConverter([
+ {
+ key: 'method',
+ converter: webidl.converters.ByteString
+ },
+ {
+ key: 'headers',
+ converter: webidl.converters.HeadersInit
+ },
+ {
+ key: 'body',
+ converter: webidl.nullableConverter(
+ webidl.converters.BodyInit
+ )
+ },
+ {
+ key: 'referrer',
+ converter: webidl.converters.USVString
+ },
+ {
+ key: 'referrerPolicy',
+ converter: webidl.converters.DOMString,
+ // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy
+ allowedValues: referrerPolicy
+ },
+ {
+ key: 'mode',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#concept-request-mode
+ allowedValues: requestMode
+ },
+ {
+ key: 'credentials',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestcredentials
+ allowedValues: requestCredentials
+ },
+ {
+ key: 'cache',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestcache
+ allowedValues: requestCache
+ },
+ {
+ key: 'redirect',
+ converter: webidl.converters.DOMString,
+ // https://fetch.spec.whatwg.org/#requestredirect
+ allowedValues: requestRedirect
+ },
+ {
+ key: 'integrity',
+ converter: webidl.converters.DOMString
+ },
+ {
+ key: 'keepalive',
+ converter: webidl.converters.boolean
+ },
+ {
+ key: 'signal',
+ converter: webidl.nullableConverter(
+ (signal) => webidl.converters.AbortSignal(
+ signal,
+ { strict: false }
+ )
+ )
+ },
+ {
+ key: 'window',
+ converter: webidl.converters.any
+ },
+ {
+ key: 'duplex',
+ converter: webidl.converters.DOMString,
+ allowedValues: requestDuplex
+ }
+])
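+// Note: the dictionary converter above both coerces each provided init member
+// (ByteString, USVString, boolean, ...) and rejects values outside the allowed
+// enumerations, e.g. (illustrative):
+//
+//   webidl.converters.RequestInit({ mode: 'nonsense' }) // rejected: not an allowed requestMode value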
+
+module.exports = { Request, makeRequest }
+
+
+/***/ }),
+
+/***/ 7823:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { Headers, HeadersList, fill } = __nccwpck_require__(554)
+const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1472)
+const util = __nccwpck_require__(3983)
+const { kEnumerableProperty } = util
+const {
+ isValidReasonPhrase,
+ isCancelled,
+ isAborted,
+ isBlobLike,
+ serializeJavascriptValueToJSONString,
+ isErrorLike,
+ isomorphicEncode
+} = __nccwpck_require__(2538)
+const {
+ redirectStatusSet,
+ nullBodyStatus,
+ DOMException
+} = __nccwpck_require__(1037)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861)
+const { webidl } = __nccwpck_require__(1744)
+const { FormData } = __nccwpck_require__(2015)
+const { getGlobalOrigin } = __nccwpck_require__(1246)
+const { URLSerializer } = __nccwpck_require__(685)
+const { kHeadersList, kConstruct } = __nccwpck_require__(2785)
+const assert = __nccwpck_require__(9491)
+const { types } = __nccwpck_require__(3837)
+
+const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream)
+const textEncoder = new TextEncoder()
+
+// https://fetch.spec.whatwg.org/#response-class
+class Response {
+ // Creates network error Response.
+ static error () {
+ // TODO
+ const relevantRealm = { settingsObject: {} }
+
+ // The static error() method steps are to return the result of creating a
+ // Response object, given a new network error, "immutable", and this’s
+ // relevant Realm.
+ const responseObject = new Response()
+ responseObject[kState] = makeNetworkError()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+ return responseObject
+ }
+
+ // https://fetch.spec.whatwg.org/#dom-response-json
+ static json (data, init = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })
+
+ if (init !== null) {
+ init = webidl.converters.ResponseInit(init)
+ }
+
+ // 1. Let bytes be the result of running serialize a JavaScript value to JSON bytes on data.
+ const bytes = textEncoder.encode(
+ serializeJavascriptValueToJSONString(data)
+ )
+
+ // 2. Let body be the result of extracting bytes.
+ const body = extractBody(bytes)
+
+ // 3. Let responseObject be the result of creating a Response object, given a new response,
+ // "response", and this’s relevant Realm.
+ const relevantRealm = { settingsObject: {} }
+ const responseObject = new Response()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kGuard] = 'response'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 4. Perform initialize a response given responseObject, init, and (body, "application/json").
+ initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })
+
+ // 5. Return responseObject.
+ return responseObject
+ }
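+ // Illustrative usage (values hypothetical): the data is serialized with
+ // JSON.stringify semantics and the body is tagged as application/json by
+ // initializeResponse above.
+ //
+ //   const res = Response.json({ ok: true }, { status: 201 })
+ //   // res.status === 201, res.headers.get('content-type') === 'application/json'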
+
+ // Creates a redirect Response that redirects to url with status status.
+ static redirect (url, status = 302) {
+ const relevantRealm = { settingsObject: {} }
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })
+
+ url = webidl.converters.USVString(url)
+ status = webidl.converters['unsigned short'](status)
+
+ // 1. Let parsedURL be the result of parsing url with current settings
+ // object’s API base URL.
+ // 2. If parsedURL is failure, then throw a TypeError.
+ // TODO: base-URL?
+ let parsedURL
+ try {
+ parsedURL = new URL(url, getGlobalOrigin())
+ } catch (err) {
+ throw Object.assign(new TypeError('Failed to parse URL from ' + url), {
+ cause: err
+ })
+ }
+
+ // 3. If status is not a redirect status, then throw a RangeError.
+ if (!redirectStatusSet.has(status)) {
+ throw new RangeError('Invalid status code ' + status)
+ }
+
+ // 4. Let responseObject be the result of creating a Response object,
+ // given a new response, "immutable", and this’s relevant Realm.
+ const responseObject = new Response()
+ responseObject[kRealm] = relevantRealm
+ responseObject[kHeaders][kGuard] = 'immutable'
+ responseObject[kHeaders][kRealm] = relevantRealm
+
+ // 5. Set responseObject’s response’s status to status.
+ responseObject[kState].status = status
+
+ // 6. Let value be parsedURL, serialized and isomorphic encoded.
+ const value = isomorphicEncode(URLSerializer(parsedURL))
+
+ // 7. Append `Location`/value to responseObject’s response’s header list.
+ responseObject[kState].headersList.append('location', value)
+
+ // 8. Return responseObject.
+ return responseObject
+ }
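+ // Illustrative usage (URL hypothetical):
+ //
+ //   const res = Response.redirect('https://example.com/next', 307)
+ //   // res.status === 307, res.headers.get('location') === 'https://example.com/next'
+ //
+ // A status outside the redirect set (301, 302, 303, 307, 308) throws a RangeError.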
+
+ // https://fetch.spec.whatwg.org/#dom-response
+ constructor (body = null, init = {}) {
+ if (body !== null) {
+ body = webidl.converters.BodyInit(body)
+ }
+
+ init = webidl.converters.ResponseInit(init)
+
+ // TODO
+ this[kRealm] = { settingsObject: {} }
+
+ // 1. Set this’s response to a new response.
+ this[kState] = makeResponse({})
+
+ // 2. Set this’s headers to a new Headers object with this’s relevant
+ // Realm, whose header list is this’s response’s header list and guard
+ // is "response".
+ this[kHeaders] = new Headers(kConstruct)
+ this[kHeaders][kGuard] = 'response'
+ this[kHeaders][kHeadersList] = this[kState].headersList
+ this[kHeaders][kRealm] = this[kRealm]
+
+ // 3. Let bodyWithType be null.
+ let bodyWithType = null
+
+ // 4. If body is non-null, then set bodyWithType to the result of extracting body.
+ if (body != null) {
+ const [extractedBody, type] = extractBody(body)
+ bodyWithType = { body: extractedBody, type }
+ }
+
+ // 5. Perform initialize a response given this, init, and bodyWithType.
+ initializeResponse(this, init, bodyWithType)
+ }
+
+ // Returns response’s type, e.g., "cors".
+ get type () {
+ webidl.brandCheck(this, Response)
+
+ // The type getter steps are to return this’s response’s type.
+ return this[kState].type
+ }
+
+ // Returns response’s URL, if it has one; otherwise the empty string.
+ get url () {
+ webidl.brandCheck(this, Response)
+
+ const urlList = this[kState].urlList
+
+ // The url getter steps are to return the empty string if this’s
+ // response’s URL is null; otherwise this’s response’s URL,
+ // serialized with exclude fragment set to true.
+ const url = urlList[urlList.length - 1] ?? null
+
+ if (url === null) {
+ return ''
+ }
+
+ return URLSerializer(url, true)
+ }
+
+ // Returns whether response was obtained through a redirect.
+ get redirected () {
+ webidl.brandCheck(this, Response)
+
+ // The redirected getter steps are to return true if this’s response’s URL
+ // list has more than one item; otherwise false.
+ return this[kState].urlList.length > 1
+ }
+
+ // Returns response’s status.
+ get status () {
+ webidl.brandCheck(this, Response)
+
+ // The status getter steps are to return this’s response’s status.
+ return this[kState].status
+ }
+
+ // Returns whether response’s status is an ok status.
+ get ok () {
+ webidl.brandCheck(this, Response)
+
+ // The ok getter steps are to return true if this’s response’s status is an
+ // ok status; otherwise false.
+ return this[kState].status >= 200 && this[kState].status <= 299
+ }
+
+ // Returns response’s status message.
+ get statusText () {
+ webidl.brandCheck(this, Response)
+
+ // The statusText getter steps are to return this’s response’s status
+ // message.
+ return this[kState].statusText
+ }
+
+ // Returns response’s headers as Headers.
+ get headers () {
+ webidl.brandCheck(this, Response)
+
+ // The headers getter steps are to return this’s headers.
+ return this[kHeaders]
+ }
+
+ get body () {
+ webidl.brandCheck(this, Response)
+
+ return this[kState].body ? this[kState].body.stream : null
+ }
+
+ get bodyUsed () {
+ webidl.brandCheck(this, Response)
+
+ return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+ }
+
+ // Returns a clone of response.
+ clone () {
+ webidl.brandCheck(this, Response)
+
+ // 1. If this is unusable, then throw a TypeError.
+ if (this.bodyUsed || (this.body && this.body.locked)) {
+ throw webidl.errors.exception({
+ header: 'Response.clone',
+ message: 'Body has already been consumed.'
+ })
+ }
+
+ // 2. Let clonedResponse be the result of cloning this’s response.
+ const clonedResponse = cloneResponse(this[kState])
+
+ // 3. Return the result of creating a Response object, given
+ // clonedResponse, this’s headers’s guard, and this’s relevant Realm.
+ const clonedResponseObject = new Response()
+ clonedResponseObject[kState] = clonedResponse
+ clonedResponseObject[kRealm] = this[kRealm]
+ clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList
+ clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+ clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+ return clonedResponseObject
+ }
+}
+
+mixinBody(Response)
+
+Object.defineProperties(Response.prototype, {
+ type: kEnumerableProperty,
+ url: kEnumerableProperty,
+ status: kEnumerableProperty,
+ ok: kEnumerableProperty,
+ redirected: kEnumerableProperty,
+ statusText: kEnumerableProperty,
+ headers: kEnumerableProperty,
+ clone: kEnumerableProperty,
+ body: kEnumerableProperty,
+ bodyUsed: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'Response',
+ configurable: true
+ }
+})
+
+Object.defineProperties(Response, {
+ json: kEnumerableProperty,
+ redirect: kEnumerableProperty,
+ error: kEnumerableProperty
+})
+
+// https://fetch.spec.whatwg.org/#concept-response-clone
+function cloneResponse (response) {
+ // To clone a response response, run these steps:
+
+ // 1. If response is a filtered response, then return a new identical
+ // filtered response whose internal response is a clone of response’s
+ // internal response.
+ if (response.internalResponse) {
+ return filterResponse(
+ cloneResponse(response.internalResponse),
+ response.type
+ )
+ }
+
+ // 2. Let newResponse be a copy of response, except for its body.
+ const newResponse = makeResponse({ ...response, body: null })
+
+ // 3. If response’s body is non-null, then set newResponse’s body to the
+ // result of cloning response’s body.
+ if (response.body != null) {
+ newResponse.body = cloneBody(response.body)
+ }
+
+ // 4. Return newResponse.
+ return newResponse
+}
+
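+// Builds the internal response state record with spec defaults. The headers
+// list and URL list are copied, so the result does not share mutable state
+// with the init it was created from.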
+function makeResponse (init) {
+ return {
+ aborted: false,
+ rangeRequested: false,
+ timingAllowPassed: false,
+ requestIncludesCredentials: false,
+ type: 'default',
+ status: 200,
+ timingInfo: null,
+ cacheState: '',
+ statusText: '',
+ ...init,
+ headersList: init.headersList
+ ? new HeadersList(init.headersList)
+ : new HeadersList(),
+ urlList: init.urlList ? [...init.urlList] : []
+ }
+}
+
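+// A network error is a response whose type is "error" and whose status is 0
+// (https://fetch.spec.whatwg.org/#concept-network-error).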
+function makeNetworkError (reason) {
+ const isError = isErrorLike(reason)
+ return makeResponse({
+ type: 'error',
+ status: 0,
+ error: isError
+ ? reason
+ : new Error(reason ? String(reason) : reason),
+ aborted: reason && reason.name === 'AbortError'
+ })
+}
+
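+// Wraps the internal response in a Proxy: reads of the filtered fields (such as
+// type or headersList) come from state, while all other reads and every write
+// fall through to the underlying response.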
+function makeFilteredResponse (response, state) {
+ state = {
+ internalResponse: response,
+ ...state
+ }
+
+ return new Proxy(response, {
+ get (target, p) {
+ return p in state ? state[p] : target[p]
+ },
+ set (target, p, value) {
+ assert(!(p in state))
+ target[p] = value
+ return true
+ }
+ })
+}
+
+// https://fetch.spec.whatwg.org/#concept-filtered-response
+function filterResponse (response, type) {
+ // Set response to the following filtered response with response as its
+ // internal response, depending on request’s response tainting:
+ if (type === 'basic') {
+ // A basic filtered response is a filtered response whose type is "basic"
+ // and header list excludes any headers in internal response’s header list
+ // whose name is a forbidden response-header name.
+
+ // Note: undici does not implement forbidden response-header names
+ return makeFilteredResponse(response, {
+ type: 'basic',
+ headersList: response.headersList
+ })
+ } else if (type === 'cors') {
+ // A CORS filtered response is a filtered response whose type is "cors"
+ // and header list excludes any headers in internal response’s header
+ // list whose name is not a CORS-safelisted response-header name, given
+ // internal response’s CORS-exposed header-name list.
+
+ // Note: undici does not implement CORS-safelisted response-header names
+ return makeFilteredResponse(response, {
+ type: 'cors',
+ headersList: response.headersList
+ })
+ } else if (type === 'opaque') {
+ // An opaque filtered response is a filtered response whose type is
+ // "opaque", URL list is the empty list, status is 0, status message
+ // is the empty byte sequence, header list is empty, and body is null.
+
+ return makeFilteredResponse(response, {
+ type: 'opaque',
+ urlList: Object.freeze([]),
+ status: 0,
+ statusText: '',
+ body: null
+ })
+ } else if (type === 'opaqueredirect') {
+ // An opaque-redirect filtered response is a filtered response whose type
+ // is "opaqueredirect", status is 0, status message is the empty byte
+ // sequence, header list is empty, and body is null.
+
+ return makeFilteredResponse(response, {
+ type: 'opaqueredirect',
+ status: 0,
+ statusText: '',
+ headersList: [],
+ body: null
+ })
+ } else {
+ assert(false)
+ }
+}
+
+// https://fetch.spec.whatwg.org/#appropriate-network-error
+function makeAppropriateNetworkError (fetchParams, err = null) {
+ // 1. Assert: fetchParams is canceled.
+ assert(isCancelled(fetchParams))
+
+ // 2. Return an aborted network error if fetchParams is aborted;
+ // otherwise return a network error.
+ return isAborted(fetchParams)
+ ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))
+ : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))
+}
+
+// https://whatpr.org/fetch/1392.html#initialize-a-response
+function initializeResponse (response, init, body) {
+ // 1. If init["status"] is not in the range 200 to 599, inclusive, then
+ // throw a RangeError.
+ if (init.status !== null && (init.status < 200 || init.status > 599)) {
+ throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.')
+ }
+
+ // 2. If init["statusText"] does not match the reason-phrase token production,
+ // then throw a TypeError.
+ if ('statusText' in init && init.statusText != null) {
+ // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:
+ // reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+ if (!isValidReasonPhrase(String(init.statusText))) {
+ throw new TypeError('Invalid statusText')
+ }
+ }
+
+ // 3. Set response’s response’s status to init["status"].
+ if ('status' in init && init.status != null) {
+ response[kState].status = init.status
+ }
+
+ // 4. Set response’s response’s status message to init["statusText"].
+ if ('statusText' in init && init.statusText != null) {
+ response[kState].statusText = init.statusText
+ }
+
+ // 5. If init["headers"] exists, then fill response’s headers with init["headers"].
+ if ('headers' in init && init.headers != null) {
+ fill(response[kHeaders], init.headers)
+ }
+
+ // 6. If body was given, then:
+ if (body) {
+ // 1. If response's status is a null body status, then throw a TypeError.
+ if (nullBodyStatus.includes(response.status)) {
+ throw webidl.errors.exception({
+ header: 'Response constructor',
+ message: 'Invalid response status code ' + response.status
+ })
+ }
+
+ // 2. Set response's body to body's body.
+ response[kState].body = body.body
+
+ // 3. If body's type is non-null and response's header list does not contain
+ // `Content-Type`, then append (`Content-Type`, body's type) to response's header list.
+ if (body.type != null && !response[kState].headersList.contains('Content-Type')) {
+ response[kState].headersList.append('content-type', body.type)
+ }
+ }
+}
+
+webidl.converters.ReadableStream = webidl.interfaceConverter(
+ ReadableStream
+)
+
+webidl.converters.FormData = webidl.interfaceConverter(
+ FormData
+)
+
+webidl.converters.URLSearchParams = webidl.interfaceConverter(
+ URLSearchParams
+)
+
+// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
+webidl.converters.XMLHttpRequestBodyInit = function (V) {
+ if (typeof V === 'string') {
+ return webidl.converters.USVString(V)
+ }
+
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
+ return webidl.converters.BufferSource(V)
+ }
+
+ if (util.isFormDataLike(V)) {
+ return webidl.converters.FormData(V, { strict: false })
+ }
+
+ if (V instanceof URLSearchParams) {
+ return webidl.converters.URLSearchParams(V)
+ }
+
+ return webidl.converters.DOMString(V)
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit
+webidl.converters.BodyInit = function (V) {
+ if (V instanceof ReadableStream) {
+ return webidl.converters.ReadableStream(V)
+ }
+
+ // Note: the spec doesn't include async iterables,
+ // this is an undici extension.
+ if (V?.[Symbol.asyncIterator]) {
+ return V
+ }
+
+ return webidl.converters.XMLHttpRequestBodyInit(V)
+}
+
+webidl.converters.ResponseInit = webidl.dictionaryConverter([
+ {
+ key: 'status',
+ converter: webidl.converters['unsigned short'],
+ defaultValue: 200
+ },
+ {
+ key: 'statusText',
+ converter: webidl.converters.ByteString,
+ defaultValue: ''
+ },
+ {
+ key: 'headers',
+ converter: webidl.converters.HeadersInit
+ }
+])
+
+module.exports = {
+ makeNetworkError,
+ makeResponse,
+ makeAppropriateNetworkError,
+ filterResponse,
+ Response,
+ cloneResponse
+}
+
+
+/***/ }),
+
+/***/ 5861:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ kUrl: Symbol('url'),
+ kHeaders: Symbol('headers'),
+ kSignal: Symbol('signal'),
+ kState: Symbol('state'),
+ kGuard: Symbol('guard'),
+ kRealm: Symbol('realm')
+}
+
+
+/***/ }),
+
+/***/ 2538:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037)
+const { getGlobalOrigin } = __nccwpck_require__(1246)
+const { performance } = __nccwpck_require__(4074)
+const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983)
+const assert = __nccwpck_require__(9491)
+const { isUint8Array } = __nccwpck_require__(9830)
+
+// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
+/** @type {import('crypto')|undefined} */
+let crypto
+
+try {
+ crypto = __nccwpck_require__(6113)
+} catch {
+
+}
+
+function responseURL (response) {
+ // https://fetch.spec.whatwg.org/#responses
+ // A response has an associated URL. It is a pointer to the last URL
+ // in response’s URL list and null if response’s URL list is empty.
+ const urlList = response.urlList
+ const length = urlList.length
+ return length === 0 ? null : urlList[length - 1].toString()
+}
+
+// https://fetch.spec.whatwg.org/#concept-response-location-url
+function responseLocationURL (response, requestFragment) {
+ // 1. If response’s status is not a redirect status, then return null.
+ if (!redirectStatusSet.has(response.status)) {
+ return null
+ }
+
+ // 2. Let location be the result of extracting header list values given
+ // `Location` and response’s header list.
+ let location = response.headersList.get('location')
+
+ // 3. If location is a header value, then set location to the result of
+ // parsing location with response’s URL.
+ if (location !== null && isValidHeaderValue(location)) {
+ location = new URL(location, responseURL(response))
+ }
+
+ // 4. If location is a URL whose fragment is null, then set location’s
+ // fragment to requestFragment.
+ if (location && !location.hash) {
+ location.hash = requestFragment
+ }
+
+ // 5. Return location.
+ return location
+}
+
+/** @returns {URL} */
+function requestCurrentURL (request) {
+ return request.urlList[request.urlList.length - 1]
+}
+
+function requestBadPort (request) {
+ // 1. Let url be request’s current URL.
+ const url = requestCurrentURL(request)
+
+ // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port,
+ // then return blocked.
+ if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) {
+ return 'blocked'
+ }
+
+ // 3. Return allowed.
+ return 'allowed'
+}
+
+function isErrorLike (object) {
+ return object instanceof Error || (
+ object?.constructor?.name === 'Error' ||
+ object?.constructor?.name === 'DOMException'
+ )
+}
+
+// Check whether |statusText| is a ByteString and
+// matches the Reason-Phrase token production.
+// RFC 2616: https://tools.ietf.org/html/rfc2616
+// RFC 7230: https://tools.ietf.org/html/rfc7230
+// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )"
+// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116
+function isValidReasonPhrase (statusText) {
+ for (let i = 0; i < statusText.length; ++i) {
+ const c = statusText.charCodeAt(i)
+ if (
+ !(
+ (
+ c === 0x09 || // HTAB
+ (c >= 0x20 && c <= 0x7e) || // SP / VCHAR
+ (c >= 0x80 && c <= 0xff)
+ ) // obs-text
+ )
+ ) {
+ return false
+ }
+ }
+ return true
+}
+
+/**
+ * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
+ * @param {number} c
+ */
+function isTokenCharCode (c) {
+ switch (c) {
+ case 0x22:
+ case 0x28:
+ case 0x29:
+ case 0x2c:
+ case 0x2f:
+ case 0x3a:
+ case 0x3b:
+ case 0x3c:
+ case 0x3d:
+ case 0x3e:
+ case 0x3f:
+ case 0x40:
+ case 0x5b:
+ case 0x5c:
+ case 0x5d:
+ case 0x7b:
+ case 0x7d:
+ // DQUOTE and "(),/:;<=>?@[\]{}"
+ return false
+ default:
+ // VCHAR %x21-7E
+ return c >= 0x21 && c <= 0x7e
+ }
+}
+
+/**
+ * @param {string} characters
+ */
+function isValidHTTPToken (characters) {
+ if (characters.length === 0) {
+ return false
+ }
+ for (let i = 0; i < characters.length; ++i) {
+ if (!isTokenCharCode(characters.charCodeAt(i))) {
+ return false
+ }
+ }
+ return true
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-name
+ * @param {string} potentialValue
+ */
+function isValidHeaderName (potentialValue) {
+ return isValidHTTPToken(potentialValue)
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-value
+ * @param {string} potentialValue
+ */
+function isValidHeaderValue (potentialValue) {
+ // - Has no leading or trailing HTTP tab or space bytes.
+ // - Contains no 0x00 (NUL) or HTTP newline bytes.
+ if (
+ potentialValue.startsWith('\t') ||
+ potentialValue.startsWith(' ') ||
+ potentialValue.endsWith('\t') ||
+ potentialValue.endsWith(' ')
+ ) {
+ return false
+ }
+
+ if (
+ potentialValue.includes('\0') ||
+ potentialValue.includes('\r') ||
+ potentialValue.includes('\n')
+ ) {
+ return false
+ }
+
+ return true
+}
+
+// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect
+function setRequestReferrerPolicyOnRedirect (request, actualResponse) {
+ // Given a request request and a response actualResponse, this algorithm
+ // updates request’s referrer policy according to the Referrer-Policy
+ // header (if any) in actualResponse.
+
+ // 1. Let policy be the result of executing § 8.1 Parse a referrer policy
+ // from a Referrer-Policy header on actualResponse.
+
+ // 8.1 Parse a referrer policy from a Referrer-Policy header
+ // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list.
+ const { headersList } = actualResponse
+ // 2. Let policy be the empty string.
+ // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token.
+ // 4. Return policy.
+ const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')
+
+ // Note: As the referrer-policy can contain multiple policies
+ // separated by comma, we need to loop through all of them
+ // and pick the first valid one.
+ // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy
+ let policy = ''
+ if (policyHeader.length > 0) {
+ // The right-most policy takes precedence.
+ // The left-most policy is the fallback.
+ for (let i = policyHeader.length; i !== 0; i--) {
+ const token = policyHeader[i - 1].trim()
+ if (referrerPolicyTokens.has(token)) {
+ policy = token
+ break
+ }
+ }
+ }
+
+ // 2. If policy is not the empty string, then set request’s referrer policy to policy.
+ if (policy !== '') {
+ request.referrerPolicy = policy
+ }
+}
+
+// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check
+function crossOriginResourcePolicyCheck () {
+ // TODO
+ return 'allowed'
+}
+
+// https://fetch.spec.whatwg.org/#concept-cors-check
+function corsCheck () {
+ // TODO
+ return 'success'
+}
+
+// https://fetch.spec.whatwg.org/#concept-tao-check
+function TAOCheck () {
+ // TODO
+ return 'success'
+}
+
+function appendFetchMetadata (httpRequest) {
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header
+ // TODO
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header
+
+ // 1. Assert: r’s url is a potentially trustworthy URL.
+ // TODO
+
+ // 2. Let header be a Structured Header whose value is a token.
+ let header = null
+
+ // 3. Set header’s value to r’s mode.
+ header = httpRequest.mode
+
+ // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.
+ httpRequest.headersList.set('sec-fetch-mode', header)
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header
+ // TODO
+
+ // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header
+ // TODO
+}
+
+// https://fetch.spec.whatwg.org/#append-a-request-origin-header
+function appendRequestOriginHeader (request) {
+ // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.
+ let serializedOrigin = request.origin
+
+ // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list.
+ if (request.responseTainting === 'cors' || request.mode === 'websocket') {
+ if (serializedOrigin) {
+ request.headersList.append('origin', serializedOrigin)
+ }
+
+ // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then:
+ } else if (request.method !== 'GET' && request.method !== 'HEAD') {
+ // 1. Switch on request’s referrer policy:
+ switch (request.referrerPolicy) {
+ case 'no-referrer':
+ // Set serializedOrigin to `null`.
+ serializedOrigin = null
+ break
+ case 'no-referrer-when-downgrade':
+ case 'strict-origin':
+ case 'strict-origin-when-cross-origin':
+ // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`.
+ if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {
+ serializedOrigin = null
+ }
+ break
+ case 'same-origin':
+ // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`.
+ if (!sameOrigin(request, requestCurrentURL(request))) {
+ serializedOrigin = null
+ }
+ break
+ default:
+ // Do nothing.
+ }
+
+ if (serializedOrigin) {
+ // 2. Append (`Origin`, serializedOrigin) to request’s header list.
+ request.headersList.append('origin', serializedOrigin)
+ }
+ }
+}
+
+function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {
+ // TODO
+ return performance.now()
+}
+
+// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info
+function createOpaqueTimingInfo (timingInfo) {
+ return {
+ startTime: timingInfo.startTime ?? 0,
+ redirectStartTime: 0,
+ redirectEndTime: 0,
+ postRedirectStartTime: timingInfo.startTime ?? 0,
+ finalServiceWorkerStartTime: 0,
+ finalNetworkResponseStartTime: 0,
+ finalNetworkRequestStartTime: 0,
+ endTime: 0,
+ encodedBodySize: 0,
+ decodedBodySize: 0,
+ finalConnectionTimingInfo: null
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#policy-container
+function makePolicyContainer () {
+ // Note: the fetch spec doesn't make use of embedder policy or CSP list
+ return {
+ referrerPolicy: 'strict-origin-when-cross-origin'
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
+function clonePolicyContainer (policyContainer) {
+ return {
+ referrerPolicy: policyContainer.referrerPolicy
+ }
+}
+
+// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
+function determineRequestsReferrer (request) {
+ // 1. Let policy be request's referrer policy.
+ const policy = request.referrerPolicy
+
+ // Note: policy cannot (shouldn't) be null or an empty string.
+ assert(policy)
+
+ // 2. Let environment be request’s client.
+
+ let referrerSource = null
+
+ // 3. Switch on request’s referrer:
+ if (request.referrer === 'client') {
+ // Note: node isn't a browser and doesn't implement document/iframes,
+ // so we bypass this step and replace it with our own.
+
+ const globalOrigin = getGlobalOrigin()
+
+ if (!globalOrigin || globalOrigin.origin === 'null') {
+ return 'no-referrer'
+ }
+
+ // note: we need to clone it as it's mutated
+ referrerSource = new URL(globalOrigin)
+ } else if (request.referrer instanceof URL) {
+ // Let referrerSource be request’s referrer.
+ referrerSource = request.referrer
+ }
+
+ // 4. Let request’s referrerURL be the result of stripping referrerSource for
+ // use as a referrer.
+ let referrerURL = stripURLForReferrer(referrerSource)
+
+ // 5. Let referrerOrigin be the result of stripping referrerSource for use as
+ // a referrer, with the origin-only flag set to true.
+ const referrerOrigin = stripURLForReferrer(referrerSource, true)
+
+ // 6. If the result of serializing referrerURL is a string whose length is
+ // greater than 4096, set referrerURL to referrerOrigin.
+ if (referrerURL.toString().length > 4096) {
+ referrerURL = referrerOrigin
+ }
+
+ const areSameOrigin = sameOrigin(request, referrerURL)
+ const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
+ !isURLPotentiallyTrustworthy(request.url)
+
+ // 8. Execute the switch statements corresponding to the value of policy:
+ switch (policy) {
+ case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
+ case 'unsafe-url': return referrerURL
+ case 'same-origin':
+ return areSameOrigin ? referrerOrigin : 'no-referrer'
+ case 'origin-when-cross-origin':
+ return areSameOrigin ? referrerURL : referrerOrigin
+ case 'strict-origin-when-cross-origin': {
+ const currentURL = requestCurrentURL(request)
+
+ // 1. If the origin of referrerURL and the origin of request’s current
+ // URL are the same, then return referrerURL.
+ if (sameOrigin(referrerURL, currentURL)) {
+ return referrerURL
+ }
+
+ // 2. If referrerURL is a potentially trustworthy URL and request’s
+ // current URL is not a potentially trustworthy URL, then return no
+ // referrer.
+ if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
+ return 'no-referrer'
+ }
+
+ // 3. Return referrerOrigin.
+ return referrerOrigin
+ }
+ case 'strict-origin': // eslint-disable-line
+ /**
+ * 1. If referrerURL is a potentially trustworthy URL and
+ * request’s current URL is not a potentially trustworthy URL,
+ * then return no referrer.
+ * 2. Return referrerOrigin
+ */
+ case 'no-referrer-when-downgrade': // eslint-disable-line
+ /**
+ * 1. If referrerURL is a potentially trustworthy URL and
+ * request’s current URL is not a potentially trustworthy URL,
+ * then return no referrer.
+ * 2. Return referrerOrigin
+ */
+
+ default: // eslint-disable-line
+ return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
+ }
+}
+
+/**
+ * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
+ * @param {URL} url
+ * @param {boolean|undefined} originOnly
+ */
+function stripURLForReferrer (url, originOnly) {
+ // 1. Assert: url is a URL.
+ assert(url instanceof URL)
+
+ // 2. If url’s scheme is a local scheme, then return no referrer.
+ if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {
+ return 'no-referrer'
+ }
+
+ // 3. Set url’s username to the empty string.
+ url.username = ''
+
+ // 4. Set url’s password to the empty string.
+ url.password = ''
+
+ // 5. Set url’s fragment to null.
+ url.hash = ''
+
+ // 6. If the origin-only flag is true, then:
+ if (originOnly) {
+ // 1. Set url’s path to « the empty string ».
+ url.pathname = ''
+
+ // 2. Set url’s query to null.
+ url.search = ''
+ }
+
+ // 7. Return url.
+ return url
+}
+
+function isURLPotentiallyTrustworthy (url) {
+ if (!(url instanceof URL)) {
+ return false
+ }
+
+ // If child of about, return true
+ if (url.href === 'about:blank' || url.href === 'about:srcdoc') {
+ return true
+ }
+
+ // If scheme is data, return true
+ if (url.protocol === 'data:') return true
+
+ // If file, return true
+ if (url.protocol === 'file:') return true
+
+ return isOriginPotentiallyTrustworthy(url.origin)
+
+ function isOriginPotentiallyTrustworthy (origin) {
+ // If origin is explicitly null, return false
+ if (origin == null || origin === 'null') return false
+
+ const originAsURL = new URL(origin)
+
+ // If secure, return true
+ if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {
+ return true
+ }
+
+ // If localhost or variants, return true
+ if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) ||
+ (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) ||
+ (originAsURL.hostname.endsWith('.localhost'))) {
+ return true
+ }
+
+ // If any other, return false
+ return false
+ }
+}
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
+ * @param {Uint8Array} bytes
+ * @param {string} metadataList
+ */
+function bytesMatch (bytes, metadataList) {
+ // If node is not built with OpenSSL support, we cannot check
+ // a request's integrity, so allow it by default (the spec will
+ // allow requests if an invalid hash is given, as precedence).
+ /* istanbul ignore if: only if node is built with --without-ssl */
+ if (crypto === undefined) {
+ return true
+ }
+
+ // 1. Let parsedMetadata be the result of parsing metadataList.
+ const parsedMetadata = parseMetadata(metadataList)
+
+ // 2. If parsedMetadata is no metadata, return true.
+ if (parsedMetadata === 'no metadata') {
+ return true
+ }
+
+ // 3. If parsedMetadata is the empty set, return true.
+ if (parsedMetadata.length === 0) {
+ return true
+ }
+
+ // 4. Let metadata be the result of getting the strongest
+ // metadata from parsedMetadata.
+ const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+ // get the strongest algorithm
+ const strongest = list[0].algo
+ // get all entries that use the strongest algorithm; ignore weaker
+ const metadata = list.filter((item) => item.algo === strongest)
+
+ // 5. For each item in metadata:
+ for (const item of metadata) {
+ // 1. Let algorithm be the alg component of item.
+ const algorithm = item.algo
+
+ // 2. Let expectedValue be the val component of item.
+ let expectedValue = item.hash
+
+ // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+ // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+ if (expectedValue.endsWith('==')) {
+ expectedValue = expectedValue.slice(0, -2)
+ }
+
+ // 3. Let actualValue be the result of applying algorithm to bytes.
+ let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+ if (actualValue.endsWith('==')) {
+ actualValue = actualValue.slice(0, -2)
+ }
+
+ // 4. If actualValue is a case-sensitive match for expectedValue,
+ // return true.
+ if (actualValue === expectedValue) {
+ return true
+ }
+
+ let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+
+ if (actualBase64URL.endsWith('==')) {
+ actualBase64URL = actualBase64URL.slice(0, -2)
+ }
+
+ if (actualBase64URL === expectedValue) {
+ return true
+ }
+ }
+
+ // 6. Return false.
+ return false
+}
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
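+// e.g. matches an integrity token of the form "sha256-<base64 digest>",
+// optionally followed by options after a space.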
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+ // 1. Let result be the empty set.
+ /** @type {{ algo: string, hash: string }[]} */
+ const result = []
+
+ // 2. Let empty be equal to true.
+ let empty = true
+
+ const supportedHashes = crypto.getHashes()
+
+ // 3. For each token returned by splitting metadata on spaces:
+ for (const token of metadata.split(' ')) {
+ // 1. Set empty to false.
+ empty = false
+
+ // 2. Parse token as a hash-with-options.
+ const parsedToken = parseHashWithOptions.exec(token)
+
+ // 3. If token does not parse, continue to the next token.
+ if (parsedToken === null || parsedToken.groups === undefined) {
+ // Note: Chromium blocks the request at this point, but Firefox
+ // gives a warning that an invalid integrity was given. The
+ // correct behavior is to ignore these, and subsequently not
+ // check the integrity of the resource.
+ continue
+ }
+
+ // 4. Let algorithm be the hash-algo component of token.
+ const algorithm = parsedToken.groups.algo
+
+ // 5. If algorithm is a hash function recognized by the user
+ // agent, add the parsed token to result.
+ if (supportedHashes.includes(algorithm.toLowerCase())) {
+ result.push(parsedToken.groups)
+ }
+ }
+
+ // 4. Return no metadata if empty is true, otherwise return result.
+ if (empty === true) {
+ return 'no metadata'
+ }
+
+ return result
+}
+
+// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
+function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
+ // TODO
+}
+
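+// Note: only scheme, host and port are compared; path, query and fragment are
+// ignored, and two opaque ("null") origins are treated as same origin.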
+/**
+ * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin}
+ * @param {URL} A
+ * @param {URL} B
+ */
+function sameOrigin (A, B) {
+ // 1. If A and B are the same opaque origin, then return true.
+ if (A.origin === B.origin && A.origin === 'null') {
+ return true
+ }
+
+ // 2. If A and B are both tuple origins and their schemes,
+ // hosts, and port are identical, then return true.
+ if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) {
+ return true
+ }
+
+ // 3. Return false.
+ return false
+}
+
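+// Deferred-promise pattern: exposes resolve/reject alongside the promise so it
+// can be settled from outside the executor.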
+function createDeferredPromise () {
+ let res
+ let rej
+ const promise = new Promise((resolve, reject) => {
+ res = resolve
+ rej = reject
+ })
+
+ return { promise, resolve: res, reject: rej }
+}
+
+function isAborted (fetchParams) {
+ return fetchParams.controller.state === 'aborted'
+}
+
+function isCancelled (fetchParams) {
+ return fetchParams.controller.state === 'aborted' ||
+ fetchParams.controller.state === 'terminated'
+}
+
+const normalizeMethodRecord = {
+ delete: 'DELETE',
+ DELETE: 'DELETE',
+ get: 'GET',
+ GET: 'GET',
+ head: 'HEAD',
+ HEAD: 'HEAD',
+ options: 'OPTIONS',
+ OPTIONS: 'OPTIONS',
+ post: 'POST',
+ POST: 'POST',
+ put: 'PUT',
+ PUT: 'PUT'
+}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(normalizeMethodRecord, null)
+
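+// e.g. normalizeMethod('post') returns 'POST'; methods not present in
+// normalizeMethodRecord (such as 'PATCH') are returned unchanged.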
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-method-normalize
+ * @param {string} method
+ */
+function normalizeMethod (method) {
+ return normalizeMethodRecord[method.toLowerCase()] ?? method
+}
+
+// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
+function serializeJavascriptValueToJSONString (value) {
+ // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).
+ const result = JSON.stringify(value)
+
+ // 2. If result is undefined, then throw a TypeError.
+ if (result === undefined) {
+ throw new TypeError('Value is not JSON serializable')
+ }
+
+ // 3. Assert: result is a string.
+ assert(typeof result === 'string')
+
+ // 4. Return result.
+ return result
+}
+
+// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object
+const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
+
+/**
+ * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object
+ * @param {() => unknown[]} iterator
+ * @param {string} name name of the instance
+ * @param {'key'|'value'|'key+value'} kind
+ */
+function makeIterator (iterator, name, kind) {
+ const object = {
+ index: 0,
+ kind,
+ target: iterator
+ }
+
+ const i = {
+ next () {
+ // 1. Let interface be the interface for which the iterator prototype object exists.
+
+ // 2. Let thisValue be the this value.
+
+ // 3. Let object be ? ToObject(thisValue).
+
+ // 4. If object is a platform object, then perform a security
+ // check, passing:
+
+ // 5. If object is not a default iterator object for interface,
+ // then throw a TypeError.
+ if (Object.getPrototypeOf(this) !== i) {
+ throw new TypeError(
+ `'next' called on an object that does not implement interface ${name} Iterator.`
+ )
+ }
+
+ // 6. Let index be object’s index.
+ // 7. Let kind be object’s kind.
+ // 8. Let values be object’s target's value pairs to iterate over.
+ const { index, kind, target } = object
+ const values = target()
+
+ // 9. Let len be the length of values.
+ const len = values.length
+
+ // 10. If index is greater than or equal to len, then return
+ // CreateIterResultObject(undefined, true).
+ if (index >= len) {
+ return { value: undefined, done: true }
+ }
+
+ // 11. Let pair be the entry in values at index index.
+ const pair = values[index]
+
+ // 12. Set object’s index to index + 1.
+ object.index = index + 1
+
+ // 13. Return the iterator result for pair and kind.
+ return iteratorResult(pair, kind)
+ },
+ // The class string of an iterator prototype object for a given interface is the
+ // result of concatenating the identifier of the interface and the string " Iterator".
+ [Symbol.toStringTag]: `${name} Iterator`
+ }
+
+ // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.
+ Object.setPrototypeOf(i, esIteratorPrototype)
+ // esIteratorPrototype needs to be the prototype of i
+ // which is the prototype of an empty object. Yes, it's confusing.
+ return Object.setPrototypeOf({}, i)
+}
+
+// https://webidl.spec.whatwg.org/#iterator-result
+function iteratorResult (pair, kind) {
+ let result
+
+ // 1. Let result be a value determined by the value of kind:
+ switch (kind) {
+ case 'key': {
+ // 1. Let idlKey be pair’s key.
+ // 2. Let key be the result of converting idlKey to an
+ // ECMAScript value.
+ // 3. result is key.
+ result = pair[0]
+ break
+ }
+ case 'value': {
+ // 1. Let idlValue be pair’s value.
+ // 2. Let value be the result of converting idlValue to
+ // an ECMAScript value.
+ // 3. result is value.
+ result = pair[1]
+ break
+ }
+ case 'key+value': {
+ // 1. Let idlKey be pair’s key.
+ // 2. Let idlValue be pair’s value.
+ // 3. Let key be the result of converting idlKey to an
+ // ECMAScript value.
+ // 4. Let value be the result of converting idlValue to
+ // an ECMAScript value.
+ // 5. Let array be ! ArrayCreate(2).
+ // 6. Call ! CreateDataProperty(array, "0", key).
+ // 7. Call ! CreateDataProperty(array, "1", value).
+ // 8. result is array.
+ result = pair
+ break
+ }
+ }
+
+ // 2. Return CreateIterResultObject(result, false).
+ return { value: result, done: false }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#body-fully-read
+ */
+async function fullyReadBody (body, processBody, processBodyError) {
+ // 1. If taskDestination is null, then set taskDestination to
+ // the result of starting a new parallel queue.
+
+ // 2. Let successSteps given a byte sequence bytes be to queue a
+ // fetch task to run processBody given bytes, with taskDestination.
+ const successSteps = processBody
+
+ // 3. Let errorSteps be to queue a fetch task to run processBodyError,
+ // with taskDestination.
+ const errorSteps = processBodyError
+
+ // 4. Let reader be the result of getting a reader for body’s stream.
+ // If that threw an exception, then run errorSteps with that
+ // exception and return.
+ let reader
+
+ try {
+ reader = body.stream.getReader()
+ } catch (e) {
+ errorSteps(e)
+ return
+ }
+
+ // 5. Read all bytes from reader, given successSteps and errorSteps.
+ try {
+ const result = await readAllBytes(reader)
+ successSteps(result)
+ } catch (e) {
+ errorSteps(e)
+ }
+}
+
+/** @type {ReadableStream} */
+let ReadableStream = globalThis.ReadableStream
+
+function isReadableStreamLike (stream) {
+ if (!ReadableStream) {
+ ReadableStream = (__nccwpck_require__(5356).ReadableStream)
+ }
+
+ return stream instanceof ReadableStream || (
+ stream[Symbol.toStringTag] === 'ReadableStream' &&
+ typeof stream.tee === 'function'
+ )
+}
+
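+// String.fromCharCode(...input) spreads its input and engines limit how many
+// arguments a single call may take, so longer inputs use the slower
+// reduce-based path below.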
+const MAXIMUM_ARGUMENT_LENGTH = 65535
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-decode
+ * @param {number[]|Uint8Array} input
+ */
+function isomorphicDecode (input) {
+ // 1. To isomorphic decode a byte sequence input, return a string whose code point
+ // length is equal to input’s length and whose code points have the same values
+ // as the values of input’s bytes, in the same order.
+
+ if (input.length < MAXIMUM_ARGUMENT_LENGTH) {
+ return String.fromCharCode(...input)
+ }
+
+ return input.reduce((previous, current) => previous + String.fromCharCode(current), '')
+}
+
+/**
+ * @param {ReadableStreamController} controller
+ */
+function readableStreamClose (controller) {
+ try {
+ controller.close()
+ } catch (err) {
+ // TODO: add comment explaining why this error occurs.
+ if (!err.message.includes('Controller is already closed')) {
+ throw err
+ }
+ }
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-encode
+ * @param {string} input
+ */
+function isomorphicEncode (input) {
+ // 1. Assert: input contains no code points greater than U+00FF.
+ for (let i = 0; i < input.length; i++) {
+ assert(input.charCodeAt(i) <= 0xFF)
+ }
+
+ // 2. Return a byte sequence whose length is equal to input’s code
+ // point length and whose bytes have the same values as the
+ // values of input’s code points, in the same order
+ return input
+}
+
+/**
+ * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
+ * @see https://streams.spec.whatwg.org/#read-loop
+ * @param {ReadableStreamDefaultReader} reader
+ */
+async function readAllBytes (reader) {
+ const bytes = []
+ let byteLength = 0
+
+ while (true) {
+ const { done, value: chunk } = await reader.read()
+
+ if (done) {
+ // 1. Call successSteps with bytes.
+ return Buffer.concat(bytes, byteLength)
+ }
+
+ // 1. If chunk is not a Uint8Array object, call failureSteps
+ // with a TypeError and abort these steps.
+ if (!isUint8Array(chunk)) {
+ throw new TypeError('Received non-Uint8Array chunk')
+ }
+
+ // 2. Append the bytes represented by chunk to bytes.
+ bytes.push(chunk)
+ byteLength += chunk.length
+
+ // 3. Read-loop given reader, bytes, successSteps, and failureSteps.
+ }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#is-local
+ * @param {URL} url
+ */
+function urlIsLocal (url) {
+ assert('protocol' in url) // ensure it's a url object
+
+ const protocol = url.protocol
+
+ return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'
+}
+
+/**
+ * @param {string|URL} url
+ */
+function urlHasHttpsScheme (url) {
+ if (typeof url === 'string') {
+ return url.startsWith('https:')
+ }
+
+ return url.protocol === 'https:'
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-scheme
+ * @param {URL} url
+ */
+function urlIsHttpHttpsScheme (url) {
+ assert('protocol' in url) // ensure it's a url object
+
+ const protocol = url.protocol
+
+ return protocol === 'http:' || protocol === 'https:'
+}
+
+/**
+ * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.
+ */
+const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))
+
+module.exports = {
+ isAborted,
+ isCancelled,
+ createDeferredPromise,
+ ReadableStreamFrom,
+ toUSVString,
+ tryUpgradeRequestToAPotentiallyTrustworthyURL,
+ coarsenedSharedCurrentTime,
+ determineRequestsReferrer,
+ makePolicyContainer,
+ clonePolicyContainer,
+ appendFetchMetadata,
+ appendRequestOriginHeader,
+ TAOCheck,
+ corsCheck,
+ crossOriginResourcePolicyCheck,
+ createOpaqueTimingInfo,
+ setRequestReferrerPolicyOnRedirect,
+ isValidHTTPToken,
+ requestBadPort,
+ requestCurrentURL,
+ responseURL,
+ responseLocationURL,
+ isBlobLike,
+ isURLPotentiallyTrustworthy,
+ isValidReasonPhrase,
+ sameOrigin,
+ normalizeMethod,
+ serializeJavascriptValueToJSONString,
+ makeIterator,
+ isValidHeaderName,
+ isValidHeaderValue,
+ hasOwn,
+ isErrorLike,
+ fullyReadBody,
+ bytesMatch,
+ isReadableStreamLike,
+ readableStreamClose,
+ isomorphicEncode,
+ isomorphicDecode,
+ urlIsLocal,
+ urlHasHttpsScheme,
+ urlIsHttpHttpsScheme,
+ readAllBytes,
+ normalizeMethodRecord
+}
+
+
+/***/ }),
+
+/***/ 1744:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { types } = __nccwpck_require__(3837)
+const { hasOwn, toUSVString } = __nccwpck_require__(2538)
+
+/** @type {import('../../types/webidl').Webidl} */
+const webidl = {}
+webidl.converters = {}
+webidl.util = {}
+webidl.errors = {}
+
+webidl.errors.exception = function (message) {
+ return new TypeError(`${message.header}: ${message.message}`)
+}
+
+webidl.errors.conversionFailed = function (context) {
+ const plural = context.types.length === 1 ? '' : ' one of'
+ const message =
+ `${context.argument} could not be converted to` +
+ `${plural}: ${context.types.join(', ')}.`
+
+ return webidl.errors.exception({
+ header: context.prefix,
+ message
+ })
+}
+
+webidl.errors.invalidArgument = function (context) {
+ return webidl.errors.exception({
+ header: context.prefix,
+ message: `"${context.value}" is an invalid ${context.type}.`
+ })
+}
+
+// https://webidl.spec.whatwg.org/#implements
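+// With the default strict behaviour a failed check throws "Illegal invocation";
+// when called with { strict: false } it instead returns the result of a
+// Symbol.toStringTag comparison.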
+webidl.brandCheck = function (V, I, opts = undefined) {
+ if (opts?.strict !== false && !(V instanceof I)) {
+ throw new TypeError('Illegal invocation')
+ } else {
+ return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]
+ }
+}
+
+webidl.argumentLengthCheck = function ({ length }, min, ctx) {
+ if (length < min) {
+ throw webidl.errors.exception({
+ message: `${min} argument${min !== 1 ? 's' : ''} required, ` +
+ `but${length ? ' only' : ''} ${length} found.`,
+ ...ctx
+ })
+ }
+}
+
+webidl.illegalConstructor = function () {
+ throw webidl.errors.exception({
+ header: 'TypeError',
+ message: 'Illegal constructor'
+ })
+}
+
+// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
+webidl.util.Type = function (V) {
+ switch (typeof V) {
+ case 'undefined': return 'Undefined'
+ case 'boolean': return 'Boolean'
+ case 'string': return 'String'
+ case 'symbol': return 'Symbol'
+ case 'number': return 'Number'
+ case 'bigint': return 'BigInt'
+ case 'function':
+ case 'object': {
+ if (V === null) {
+ return 'Null'
+ }
+
+ return 'Object'
+ }
+ }
+}
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
+webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {
+ let upperBound
+ let lowerBound
+
+ // 1. If bitLength is 64, then:
+ if (bitLength === 64) {
+ // 1. Let upperBound be 2^53 − 1.
+ upperBound = Math.pow(2, 53) - 1
+
+ // 2. If signedness is "unsigned", then let lowerBound be 0.
+ if (signedness === 'unsigned') {
+ lowerBound = 0
+ } else {
+ // 3. Otherwise let lowerBound be −2^53 + 1.
+ lowerBound = Math.pow(-2, 53) + 1
+ }
+ } else if (signedness === 'unsigned') {
+ // 2. Otherwise, if signedness is "unsigned", then:
+
+ // 1. Let lowerBound be 0.
+ lowerBound = 0
+
+ // 2. Let upperBound be 2^bitLength − 1.
+ upperBound = Math.pow(2, bitLength) - 1
+ } else {
+ // 3. Otherwise:
+
+ // 1. Let lowerBound be -2^bitLength − 1.
+ lowerBound = Math.pow(-2, bitLength) - 1
+
+ // 2. Let upperBound be 2^bitLength − 1 − 1.
+ upperBound = Math.pow(2, bitLength - 1) - 1
+ }
+
+ // 4. Let x be ? ToNumber(V).
+ let x = Number(V)
+
+ // 5. If x is −0, then set x to +0.
+ if (x === 0) {
+ x = 0
+ }
+
+ // 6. If the conversion is to an IDL type associated
+ // with the [EnforceRange] extended attribute, then:
+ if (opts.enforceRange === true) {
+ // 1. If x is NaN, +∞, or −∞, then throw a TypeError.
+ if (
+ Number.isNaN(x) ||
+ x === Number.POSITIVE_INFINITY ||
+ x === Number.NEGATIVE_INFINITY
+ ) {
+ throw webidl.errors.exception({
+ header: 'Integer conversion',
+ message: `Could not convert ${V} to an integer.`
+ })
+ }
+
+ // 2. Set x to IntegerPart(x).
+ x = webidl.util.IntegerPart(x)
+
+ // 3. If x < lowerBound or x > upperBound, then
+ // throw a TypeError.
+ if (x < lowerBound || x > upperBound) {
+ throw webidl.errors.exception({
+ header: 'Integer conversion',
+ message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`
+ })
+ }
+
+ // 4. Return x.
+ return x
+ }
+
+ // 7. If x is not NaN and the conversion is to an IDL
+ // type associated with the [Clamp] extended
+ // attribute, then:
+ if (!Number.isNaN(x) && opts.clamp === true) {
+ // 1. Set x to min(max(x, lowerBound), upperBound).
+ x = Math.min(Math.max(x, lowerBound), upperBound)
+
+ // 2. Round x to the nearest integer, choosing the
+ // even integer if it lies halfway between two,
+ // and choosing +0 rather than −0.
+ if (Math.floor(x) % 2 === 0) {
+ x = Math.floor(x)
+ } else {
+ x = Math.ceil(x)
+ }
+
+ // 3. Return x.
+ return x
+ }
+
+ // 8. If x is NaN, +0, +∞, or −∞, then return +0.
+ if (
+ Number.isNaN(x) ||
+ (x === 0 && Object.is(0, x)) ||
+ x === Number.POSITIVE_INFINITY ||
+ x === Number.NEGATIVE_INFINITY
+ ) {
+ return 0
+ }
+
+ // 9. Set x to IntegerPart(x).
+ x = webidl.util.IntegerPart(x)
+
+ // 10. Set x to x modulo 2^bitLength.
+ x = x % Math.pow(2, bitLength)
+
+ // 11. If signedness is "signed" and x ≥ 2^bitLength − 1,
+ // then return x − 2^bitLength.
+ if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) {
+ return x - Math.pow(2, bitLength)
+ }
+
+ // 12. Otherwise, return x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart
+webidl.util.IntegerPart = function (n) {
+ // 1. Let r be floor(abs(n)).
+ const r = Math.floor(Math.abs(n))
+
+ // 2. If n < 0, then return -1 × r.
+ if (n < 0) {
+ return -1 * r
+ }
+
+ // 3. Otherwise, return r.
+ return r
+}
+
+// https://webidl.spec.whatwg.org/#es-sequence
+webidl.sequenceConverter = function (converter) {
+ return (V) => {
+ // 1. If Type(V) is not Object, throw a TypeError.
+ if (webidl.util.Type(V) !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Sequence',
+ message: `Value of type ${webidl.util.Type(V)} is not an Object.`
+ })
+ }
+
+ // 2. Let method be ? GetMethod(V, @@iterator).
+ /** @type {Generator} */
+ const method = V?.[Symbol.iterator]?.()
+ const seq = []
+
+ // 3. If method is undefined, throw a TypeError.
+ if (
+ method === undefined ||
+ typeof method.next !== 'function'
+ ) {
+ throw webidl.errors.exception({
+ header: 'Sequence',
+ message: 'Object is not an iterator.'
+ })
+ }
+
+ // https://webidl.spec.whatwg.org/#create-sequence-from-iterable
+ while (true) {
+ const { done, value } = method.next()
+
+ if (done) {
+ break
+ }
+
+ seq.push(converter(value))
+ }
+
+ return seq
+ }
+}
+
+// https://webidl.spec.whatwg.org/#es-to-record
+webidl.recordConverter = function (keyConverter, valueConverter) {
+ return (O) => {
+ // 1. If Type(O) is not Object, throw a TypeError.
+ if (webidl.util.Type(O) !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Record',
+ message: `Value of type ${webidl.util.Type(O)} is not an Object.`
+ })
+ }
+
+ // 2. Let result be a new empty instance of record.
+ const result = {}
+
+ if (!types.isProxy(O)) {
+ // Object.keys only returns enumerable properties
+ const keys = Object.keys(O)
+
+ for (const key of keys) {
+ // 1. Let typedKey be key converted to an IDL value of type K.
+ const typedKey = keyConverter(key)
+
+ // 2. Let value be ? Get(O, key).
+ // 3. Let typedValue be value converted to an IDL value of type V.
+ const typedValue = valueConverter(O[key])
+
+ // 4. Set result[typedKey] to typedValue.
+ result[typedKey] = typedValue
+ }
+
+ // 5. Return result.
+ return result
+ }
+
+ // 3. Let keys be ? O.[[OwnPropertyKeys]]().
+ const keys = Reflect.ownKeys(O)
+
+ // 4. For each key of keys.
+ for (const key of keys) {
+ // 1. Let desc be ? O.[[GetOwnProperty]](key).
+ const desc = Reflect.getOwnPropertyDescriptor(O, key)
+
+ // 2. If desc is not undefined and desc.[[Enumerable]] is true:
+ if (desc?.enumerable) {
+ // 1. Let typedKey be key converted to an IDL value of type K.
+ const typedKey = keyConverter(key)
+
+ // 2. Let value be ? Get(O, key).
+ // 3. Let typedValue be value converted to an IDL value of type V.
+ const typedValue = valueConverter(O[key])
+
+ // 4. Set result[typedKey] to typedValue.
+ result[typedKey] = typedValue
+ }
+ }
+
+ // 5. Return result.
+ return result
+ }
+}
+
+webidl.interfaceConverter = function (i) {
+ return (V, opts = {}) => {
+ if (opts.strict !== false && !(V instanceof i)) {
+ throw webidl.errors.exception({
+ header: i.name,
+ message: `Expected ${V} to be an instance of ${i.name}.`
+ })
+ }
+
+ return V
+ }
+}
+
+webidl.dictionaryConverter = function (converters) {
+ return (dictionary) => {
+ const type = webidl.util.Type(dictionary)
+ const dict = {}
+
+ if (type === 'Null' || type === 'Undefined') {
+ return dict
+ } else if (type !== 'Object') {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`
+ })
+ }
+
+ for (const options of converters) {
+ const { key, defaultValue, required, converter } = options
+
+ if (required === true) {
+ if (!hasOwn(dictionary, key)) {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `Missing required key "${key}".`
+ })
+ }
+ }
+
+ let value = dictionary[key]
+ const hasDefault = hasOwn(options, 'defaultValue')
+
+ // Only use defaultValue if value is undefined and
+      // a defaultValue option was provided.
+ if (hasDefault && value !== null) {
+ value = value ?? defaultValue
+ }
+
+ // A key can be optional and have no default value.
+ // When this happens, do not perform a conversion,
+ // and do not assign the key a value.
+ if (required || hasDefault || value !== undefined) {
+ value = converter(value)
+
+ if (
+ options.allowedValues &&
+ !options.allowedValues.includes(value)
+ ) {
+ throw webidl.errors.exception({
+ header: 'Dictionary',
+ message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`
+ })
+ }
+
+ dict[key] = value
+ }
+ }
+
+ return dict
+ }
+}
+
+webidl.nullableConverter = function (converter) {
+ return (V) => {
+ if (V === null) {
+ return V
+ }
+
+ return converter(V)
+ }
+}
+
+// https://webidl.spec.whatwg.org/#es-DOMString
+webidl.converters.DOMString = function (V, opts = {}) {
+ // 1. If V is null and the conversion is to an IDL type
+ // associated with the [LegacyNullToEmptyString]
+ // extended attribute, then return the DOMString value
+ // that represents the empty string.
+ if (V === null && opts.legacyNullToEmptyString) {
+ return ''
+ }
+
+ // 2. Let x be ? ToString(V).
+ if (typeof V === 'symbol') {
+ throw new TypeError('Could not convert argument of type symbol to string.')
+ }
+
+ // 3. Return the IDL DOMString value that represents the
+ // same sequence of code units as the one the
+ // ECMAScript String value x represents.
+ return String(V)
+}
+
+// https://webidl.spec.whatwg.org/#es-ByteString
+webidl.converters.ByteString = function (V) {
+ // 1. Let x be ? ToString(V).
+ // Note: DOMString converter perform ? ToString(V)
+ const x = webidl.converters.DOMString(V)
+
+ // 2. If the value of any element of x is greater than
+ // 255, then throw a TypeError.
+ for (let index = 0; index < x.length; index++) {
+ if (x.charCodeAt(index) > 255) {
+ throw new TypeError(
+ 'Cannot convert argument to a ByteString because the character at ' +
+ `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`
+ )
+ }
+ }
+
+ // 3. Return an IDL ByteString value whose length is the
+ // length of x, and where the value of each element is
+ // the value of the corresponding element of x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-USVString
+webidl.converters.USVString = toUSVString
+
+// https://webidl.spec.whatwg.org/#es-boolean
+webidl.converters.boolean = function (V) {
+ // 1. Let x be the result of computing ToBoolean(V).
+ const x = Boolean(V)
+
+ // 2. Return the IDL boolean value that is the one that represents
+ // the same truth value as the ECMAScript Boolean value x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-any
+webidl.converters.any = function (V) {
+ return V
+}
+
+// https://webidl.spec.whatwg.org/#es-long-long
+webidl.converters['long long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 64, "signed").
+ const x = webidl.util.ConvertToInt(V, 64, 'signed')
+
+ // 2. Return the IDL long long value that represents
+ // the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long-long
+webidl.converters['unsigned long long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 64, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 64, 'unsigned')
+
+ // 2. Return the IDL unsigned long long value that
+ // represents the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long
+webidl.converters['unsigned long'] = function (V) {
+ // 1. Let x be ? ConvertToInt(V, 32, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 32, 'unsigned')
+
+ // 2. Return the IDL unsigned long value that
+ // represents the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-short
+webidl.converters['unsigned short'] = function (V, opts) {
+ // 1. Let x be ? ConvertToInt(V, 16, "unsigned").
+ const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)
+
+ // 2. Return the IDL unsigned short value that represents
+ // the same numeric value as x.
+ return x
+}
+
+// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
+webidl.converters.ArrayBuffer = function (V, opts = {}) {
+ // 1. If Type(V) is not Object, or V does not have an
+ // [[ArrayBufferData]] internal slot, then throw a
+ // TypeError.
+ // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
+ // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
+ if (
+ webidl.util.Type(V) !== 'Object' ||
+ !types.isAnyArrayBuffer(V)
+ ) {
+ throw webidl.errors.conversionFailed({
+ prefix: `${V}`,
+ argument: `${V}`,
+ types: ['ArrayBuffer']
+ })
+ }
+
+ // 2. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V) is true, then throw a
+ // TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V) is true, then throw a
+ // TypeError.
+ // Note: resizable ArrayBuffers are currently a proposal.
+
+ // 4. Return the IDL ArrayBuffer value that is a
+ // reference to the same object as V.
+ return V
+}
+
+webidl.converters.TypedArray = function (V, T, opts = {}) {
+ // 1. Let T be the IDL type V is being converted to.
+
+ // 2. If Type(V) is not Object, or V does not have a
+ // [[TypedArrayName]] internal slot with a value
+ // equal to T’s name, then throw a TypeError.
+ if (
+ webidl.util.Type(V) !== 'Object' ||
+ !types.isTypedArray(V) ||
+ V.constructor.name !== T.name
+ ) {
+ throw webidl.errors.conversionFailed({
+ prefix: `${T.name}`,
+ argument: `${V}`,
+ types: [T.name]
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 4. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ // Note: resizable array buffers are currently a proposal
+
+ // 5. Return the IDL value of type T that is a reference
+ // to the same object as V.
+ return V
+}
+
+webidl.converters.DataView = function (V, opts = {}) {
+ // 1. If Type(V) is not Object, or V does not have a
+ // [[DataView]] internal slot, then throw a TypeError.
+ if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {
+ throw webidl.errors.exception({
+ header: 'DataView',
+ message: 'Object is not a DataView.'
+ })
+ }
+
+ // 2. If the conversion is not to an IDL type associated
+ // with the [AllowShared] extended attribute, and
+ // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
+ // then throw a TypeError.
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+ throw webidl.errors.exception({
+ header: 'ArrayBuffer',
+ message: 'SharedArrayBuffer is not allowed.'
+ })
+ }
+
+ // 3. If the conversion is not to an IDL type associated
+ // with the [AllowResizable] extended attribute, and
+ // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+ // true, then throw a TypeError.
+ // Note: resizable ArrayBuffers are currently a proposal
+
+ // 4. Return the IDL DataView value that is a reference
+ // to the same object as V.
+ return V
+}
+
+// https://webidl.spec.whatwg.org/#BufferSource
+webidl.converters.BufferSource = function (V, opts = {}) {
+ if (types.isAnyArrayBuffer(V)) {
+ return webidl.converters.ArrayBuffer(V, opts)
+ }
+
+ if (types.isTypedArray(V)) {
+ return webidl.converters.TypedArray(V, V.constructor)
+ }
+
+ if (types.isDataView(V)) {
+ return webidl.converters.DataView(V, opts)
+ }
+
+ throw new TypeError(`Could not convert ${V} to a BufferSource.`)
+}
+
+webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
+  webidl.converters.ByteString
+)
+
+webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(
+  webidl.converters['sequence<ByteString>']
+)
+
+webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(
+  webidl.converters.ByteString,
+  webidl.converters.ByteString
+)
+
+module.exports = {
+ webidl
+}
+
+
+/***/ }),
+
+/***/ 4854:
+/***/ ((module) => {
+
+"use strict";
+
+
+/**
+ * @see https://encoding.spec.whatwg.org/#concept-encoding-get
+ * @param {string|undefined} label
+ */
+function getEncoding (label) {
+ if (!label) {
+ return 'failure'
+ }
+
+ // 1. Remove any leading and trailing ASCII whitespace from label.
+ // 2. If label is an ASCII case-insensitive match for any of the
+ // labels listed in the table below, then return the
+ // corresponding encoding; otherwise return failure.
+ switch (label.trim().toLowerCase()) {
+ case 'unicode-1-1-utf-8':
+ case 'unicode11utf8':
+ case 'unicode20utf8':
+ case 'utf-8':
+ case 'utf8':
+ case 'x-unicode20utf8':
+ return 'UTF-8'
+ case '866':
+ case 'cp866':
+ case 'csibm866':
+ case 'ibm866':
+ return 'IBM866'
+ case 'csisolatin2':
+ case 'iso-8859-2':
+ case 'iso-ir-101':
+ case 'iso8859-2':
+ case 'iso88592':
+ case 'iso_8859-2':
+ case 'iso_8859-2:1987':
+ case 'l2':
+ case 'latin2':
+ return 'ISO-8859-2'
+ case 'csisolatin3':
+ case 'iso-8859-3':
+ case 'iso-ir-109':
+ case 'iso8859-3':
+ case 'iso88593':
+ case 'iso_8859-3':
+ case 'iso_8859-3:1988':
+ case 'l3':
+ case 'latin3':
+ return 'ISO-8859-3'
+ case 'csisolatin4':
+ case 'iso-8859-4':
+ case 'iso-ir-110':
+ case 'iso8859-4':
+ case 'iso88594':
+ case 'iso_8859-4':
+ case 'iso_8859-4:1988':
+ case 'l4':
+ case 'latin4':
+ return 'ISO-8859-4'
+ case 'csisolatincyrillic':
+ case 'cyrillic':
+ case 'iso-8859-5':
+ case 'iso-ir-144':
+ case 'iso8859-5':
+ case 'iso88595':
+ case 'iso_8859-5':
+ case 'iso_8859-5:1988':
+ return 'ISO-8859-5'
+ case 'arabic':
+ case 'asmo-708':
+ case 'csiso88596e':
+ case 'csiso88596i':
+ case 'csisolatinarabic':
+ case 'ecma-114':
+ case 'iso-8859-6':
+ case 'iso-8859-6-e':
+ case 'iso-8859-6-i':
+ case 'iso-ir-127':
+ case 'iso8859-6':
+ case 'iso88596':
+ case 'iso_8859-6':
+ case 'iso_8859-6:1987':
+ return 'ISO-8859-6'
+ case 'csisolatingreek':
+ case 'ecma-118':
+ case 'elot_928':
+ case 'greek':
+ case 'greek8':
+ case 'iso-8859-7':
+ case 'iso-ir-126':
+ case 'iso8859-7':
+ case 'iso88597':
+ case 'iso_8859-7':
+ case 'iso_8859-7:1987':
+ case 'sun_eu_greek':
+ return 'ISO-8859-7'
+ case 'csiso88598e':
+ case 'csisolatinhebrew':
+ case 'hebrew':
+ case 'iso-8859-8':
+ case 'iso-8859-8-e':
+ case 'iso-ir-138':
+ case 'iso8859-8':
+ case 'iso88598':
+ case 'iso_8859-8':
+ case 'iso_8859-8:1988':
+ case 'visual':
+ return 'ISO-8859-8'
+ case 'csiso88598i':
+ case 'iso-8859-8-i':
+ case 'logical':
+ return 'ISO-8859-8-I'
+ case 'csisolatin6':
+ case 'iso-8859-10':
+ case 'iso-ir-157':
+ case 'iso8859-10':
+ case 'iso885910':
+ case 'l6':
+ case 'latin6':
+ return 'ISO-8859-10'
+ case 'iso-8859-13':
+ case 'iso8859-13':
+ case 'iso885913':
+ return 'ISO-8859-13'
+ case 'iso-8859-14':
+ case 'iso8859-14':
+ case 'iso885914':
+ return 'ISO-8859-14'
+ case 'csisolatin9':
+ case 'iso-8859-15':
+ case 'iso8859-15':
+ case 'iso885915':
+ case 'iso_8859-15':
+ case 'l9':
+ return 'ISO-8859-15'
+ case 'iso-8859-16':
+ return 'ISO-8859-16'
+ case 'cskoi8r':
+ case 'koi':
+ case 'koi8':
+ case 'koi8-r':
+ case 'koi8_r':
+ return 'KOI8-R'
+ case 'koi8-ru':
+ case 'koi8-u':
+ return 'KOI8-U'
+ case 'csmacintosh':
+ case 'mac':
+ case 'macintosh':
+ case 'x-mac-roman':
+ return 'macintosh'
+ case 'iso-8859-11':
+ case 'iso8859-11':
+ case 'iso885911':
+ case 'tis-620':
+ case 'windows-874':
+ return 'windows-874'
+ case 'cp1250':
+ case 'windows-1250':
+ case 'x-cp1250':
+ return 'windows-1250'
+ case 'cp1251':
+ case 'windows-1251':
+ case 'x-cp1251':
+ return 'windows-1251'
+ case 'ansi_x3.4-1968':
+ case 'ascii':
+ case 'cp1252':
+ case 'cp819':
+ case 'csisolatin1':
+ case 'ibm819':
+ case 'iso-8859-1':
+ case 'iso-ir-100':
+ case 'iso8859-1':
+ case 'iso88591':
+ case 'iso_8859-1':
+ case 'iso_8859-1:1987':
+ case 'l1':
+ case 'latin1':
+ case 'us-ascii':
+ case 'windows-1252':
+ case 'x-cp1252':
+ return 'windows-1252'
+ case 'cp1253':
+ case 'windows-1253':
+ case 'x-cp1253':
+ return 'windows-1253'
+ case 'cp1254':
+ case 'csisolatin5':
+ case 'iso-8859-9':
+ case 'iso-ir-148':
+ case 'iso8859-9':
+ case 'iso88599':
+ case 'iso_8859-9':
+ case 'iso_8859-9:1989':
+ case 'l5':
+ case 'latin5':
+ case 'windows-1254':
+ case 'x-cp1254':
+ return 'windows-1254'
+ case 'cp1255':
+ case 'windows-1255':
+ case 'x-cp1255':
+ return 'windows-1255'
+ case 'cp1256':
+ case 'windows-1256':
+ case 'x-cp1256':
+ return 'windows-1256'
+ case 'cp1257':
+ case 'windows-1257':
+ case 'x-cp1257':
+ return 'windows-1257'
+ case 'cp1258':
+ case 'windows-1258':
+ case 'x-cp1258':
+ return 'windows-1258'
+ case 'x-mac-cyrillic':
+ case 'x-mac-ukrainian':
+ return 'x-mac-cyrillic'
+ case 'chinese':
+ case 'csgb2312':
+ case 'csiso58gb231280':
+ case 'gb2312':
+ case 'gb_2312':
+ case 'gb_2312-80':
+ case 'gbk':
+ case 'iso-ir-58':
+ case 'x-gbk':
+ return 'GBK'
+ case 'gb18030':
+ return 'gb18030'
+ case 'big5':
+ case 'big5-hkscs':
+ case 'cn-big5':
+ case 'csbig5':
+ case 'x-x-big5':
+ return 'Big5'
+ case 'cseucpkdfmtjapanese':
+ case 'euc-jp':
+ case 'x-euc-jp':
+ return 'EUC-JP'
+ case 'csiso2022jp':
+ case 'iso-2022-jp':
+ return 'ISO-2022-JP'
+ case 'csshiftjis':
+ case 'ms932':
+ case 'ms_kanji':
+ case 'shift-jis':
+ case 'shift_jis':
+ case 'sjis':
+ case 'windows-31j':
+ case 'x-sjis':
+ return 'Shift_JIS'
+ case 'cseuckr':
+ case 'csksc56011987':
+ case 'euc-kr':
+ case 'iso-ir-149':
+ case 'korean':
+ case 'ks_c_5601-1987':
+ case 'ks_c_5601-1989':
+ case 'ksc5601':
+ case 'ksc_5601':
+ case 'windows-949':
+ return 'EUC-KR'
+ case 'csiso2022kr':
+ case 'hz-gb-2312':
+ case 'iso-2022-cn':
+ case 'iso-2022-cn-ext':
+ case 'iso-2022-kr':
+ case 'replacement':
+ return 'replacement'
+ case 'unicodefffe':
+ case 'utf-16be':
+ return 'UTF-16BE'
+ case 'csunicode':
+ case 'iso-10646-ucs-2':
+ case 'ucs-2':
+ case 'unicode':
+ case 'unicodefeff':
+ case 'utf-16':
+ case 'utf-16le':
+ return 'UTF-16LE'
+ case 'x-user-defined':
+ return 'x-user-defined'
+ default: return 'failure'
+ }
+}
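+
+// Illustrative examples, comment only: the label is trimmed and matched
+// case-insensitively against the table above.
+//
+//   getEncoding(' UTF8 ')  // 'UTF-8'
+//   getEncoding('latin1')  // 'windows-1252'
+//   getEncoding('nope')    // 'failure'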
+
+module.exports = {
+ getEncoding
+}
+
+
+/***/ }),
+
+/***/ 1446:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ staticPropertyDescriptors,
+ readOperation,
+ fireAProgressEvent
+} = __nccwpck_require__(7530)
+const {
+ kState,
+ kError,
+ kResult,
+ kEvents,
+ kAborted
+} = __nccwpck_require__(9054)
+const { webidl } = __nccwpck_require__(1744)
+const { kEnumerableProperty } = __nccwpck_require__(3983)
+
+class FileReader extends EventTarget {
+ constructor () {
+ super()
+
+ this[kState] = 'empty'
+ this[kResult] = null
+ this[kError] = null
+ this[kEvents] = {
+ loadend: null,
+ error: null,
+ abort: null,
+ load: null,
+ progress: null,
+ loadstart: null
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
+ * @param {import('buffer').Blob} blob
+ */
+ readAsArrayBuffer (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsArrayBuffer(blob) method, when invoked,
+ // must initiate a read operation for blob with ArrayBuffer.
+ readOperation(this, blob, 'ArrayBuffer')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#readAsBinaryString
+ * @param {import('buffer').Blob} blob
+ */
+ readAsBinaryString (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsBinaryString(blob) method, when invoked,
+ // must initiate a read operation for blob with BinaryString.
+ readOperation(this, blob, 'BinaryString')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#readAsDataText
+ * @param {import('buffer').Blob} blob
+ * @param {string?} encoding
+ */
+ readAsText (blob, encoding = undefined) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ if (encoding !== undefined) {
+ encoding = webidl.converters.DOMString(encoding)
+ }
+
+ // The readAsText(blob, encoding) method, when invoked,
+ // must initiate a read operation for blob with Text and encoding.
+ readOperation(this, blob, 'Text', encoding)
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
+ * @param {import('buffer').Blob} blob
+ */
+ readAsDataURL (blob) {
+ webidl.brandCheck(this, FileReader)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })
+
+ blob = webidl.converters.Blob(blob, { strict: false })
+
+ // The readAsDataURL(blob) method, when invoked, must
+ // initiate a read operation for blob with DataURL.
+ readOperation(this, blob, 'DataURL')
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dfn-abort
+ */
+ abort () {
+ // 1. If this's state is "empty" or if this's state is
+ // "done" set this's result to null and terminate
+ // this algorithm.
+ if (this[kState] === 'empty' || this[kState] === 'done') {
+ this[kResult] = null
+ return
+ }
+
+ // 2. If this's state is "loading" set this's state to
+ // "done" and set this's result to null.
+ if (this[kState] === 'loading') {
+ this[kState] = 'done'
+ this[kResult] = null
+ }
+
+ // 3. If there are any tasks from this on the file reading
+ // task source in an affiliated task queue, then remove
+ // those tasks from that task queue.
+ this[kAborted] = true
+
+ // 4. Terminate the algorithm for the read method being processed.
+ // TODO
+
+ // 5. Fire a progress event called abort at this.
+ fireAProgressEvent('abort', this)
+
+ // 6. If this's state is not "loading", fire a progress
+ // event called loadend at this.
+ if (this[kState] !== 'loading') {
+ fireAProgressEvent('loadend', this)
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
+ */
+ get readyState () {
+ webidl.brandCheck(this, FileReader)
+
+ switch (this[kState]) {
+ case 'empty': return this.EMPTY
+ case 'loading': return this.LOADING
+ case 'done': return this.DONE
+ }
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-result
+ */
+ get result () {
+ webidl.brandCheck(this, FileReader)
+
+ // The result attribute’s getter, when invoked, must return
+ // this's result.
+ return this[kResult]
+ }
+
+ /**
+ * @see https://w3c.github.io/FileAPI/#dom-filereader-error
+ */
+ get error () {
+ webidl.brandCheck(this, FileReader)
+
+ // The error attribute’s getter, when invoked, must return
+ // this's error.
+ return this[kError]
+ }
+
+ get onloadend () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].loadend
+ }
+
+ set onloadend (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].loadend) {
+ this.removeEventListener('loadend', this[kEvents].loadend)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].loadend = fn
+ this.addEventListener('loadend', fn)
+ } else {
+ this[kEvents].loadend = null
+ }
+ }
+
+ get onerror () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].error
+ }
+
+ set onerror (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].error) {
+ this.removeEventListener('error', this[kEvents].error)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].error = fn
+ this.addEventListener('error', fn)
+ } else {
+ this[kEvents].error = null
+ }
+ }
+
+ get onloadstart () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].loadstart
+ }
+
+ set onloadstart (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].loadstart) {
+ this.removeEventListener('loadstart', this[kEvents].loadstart)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].loadstart = fn
+ this.addEventListener('loadstart', fn)
+ } else {
+ this[kEvents].loadstart = null
+ }
+ }
+
+ get onprogress () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].progress
+ }
+
+ set onprogress (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].progress) {
+ this.removeEventListener('progress', this[kEvents].progress)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].progress = fn
+ this.addEventListener('progress', fn)
+ } else {
+ this[kEvents].progress = null
+ }
+ }
+
+ get onload () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].load
+ }
+
+ set onload (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].load) {
+ this.removeEventListener('load', this[kEvents].load)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].load = fn
+ this.addEventListener('load', fn)
+ } else {
+ this[kEvents].load = null
+ }
+ }
+
+ get onabort () {
+ webidl.brandCheck(this, FileReader)
+
+ return this[kEvents].abort
+ }
+
+ set onabort (fn) {
+ webidl.brandCheck(this, FileReader)
+
+ if (this[kEvents].abort) {
+ this.removeEventListener('abort', this[kEvents].abort)
+ }
+
+ if (typeof fn === 'function') {
+ this[kEvents].abort = fn
+ this.addEventListener('abort', fn)
+ } else {
+ this[kEvents].abort = null
+ }
+ }
+}
+
+// https://w3c.github.io/FileAPI/#dom-filereader-empty
+FileReader.EMPTY = FileReader.prototype.EMPTY = 0
+// https://w3c.github.io/FileAPI/#dom-filereader-loading
+FileReader.LOADING = FileReader.prototype.LOADING = 1
+// https://w3c.github.io/FileAPI/#dom-filereader-done
+FileReader.DONE = FileReader.prototype.DONE = 2
+
+Object.defineProperties(FileReader.prototype, {
+ EMPTY: staticPropertyDescriptors,
+ LOADING: staticPropertyDescriptors,
+ DONE: staticPropertyDescriptors,
+ readAsArrayBuffer: kEnumerableProperty,
+ readAsBinaryString: kEnumerableProperty,
+ readAsText: kEnumerableProperty,
+ readAsDataURL: kEnumerableProperty,
+ abort: kEnumerableProperty,
+ readyState: kEnumerableProperty,
+ result: kEnumerableProperty,
+ error: kEnumerableProperty,
+ onloadstart: kEnumerableProperty,
+ onprogress: kEnumerableProperty,
+ onload: kEnumerableProperty,
+ onabort: kEnumerableProperty,
+ onerror: kEnumerableProperty,
+ onloadend: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'FileReader',
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+})
+
+Object.defineProperties(FileReader, {
+ EMPTY: staticPropertyDescriptors,
+ LOADING: staticPropertyDescriptors,
+ DONE: staticPropertyDescriptors
+})
+
+module.exports = {
+ FileReader
+}
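+
+// Minimal usage sketch, comment only, assuming a Blob from node:buffer is
+// available in this runtime; readAsText starts an asynchronous read operation
+// and the result is delivered through the load event.
+//
+//   const { Blob } = require('buffer')
+//   const fr = new FileReader()
+//   fr.onload = () => console.log(fr.result) // 'hello'
+//   fr.readAsText(new Blob(['hello']))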
+
+
+/***/ }),
+
+/***/ 5504:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { webidl } = __nccwpck_require__(1744)
+
+const kState = Symbol('ProgressEvent state')
+
+/**
+ * @see https://xhr.spec.whatwg.org/#progressevent
+ */
+class ProgressEvent extends Event {
+ constructor (type, eventInitDict = {}) {
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})
+
+ super(type, eventInitDict)
+
+ this[kState] = {
+ lengthComputable: eventInitDict.lengthComputable,
+ loaded: eventInitDict.loaded,
+ total: eventInitDict.total
+ }
+ }
+
+ get lengthComputable () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].lengthComputable
+ }
+
+ get loaded () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].loaded
+ }
+
+ get total () {
+ webidl.brandCheck(this, ProgressEvent)
+
+ return this[kState].total
+ }
+}
+
+webidl.converters.ProgressEventInit = webidl.dictionaryConverter([
+ {
+ key: 'lengthComputable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'loaded',
+ converter: webidl.converters['unsigned long long'],
+ defaultValue: 0
+ },
+ {
+ key: 'total',
+ converter: webidl.converters['unsigned long long'],
+ defaultValue: 0
+ },
+ {
+ key: 'bubbles',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'cancelable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'composed',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+])
+
+module.exports = {
+ ProgressEvent
+}
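+
+// Illustrative example, comment only: members missing from the init dictionary
+// fall back to the defaults declared in the converter above.
+//
+//   const e = new ProgressEvent('progress', { loaded: 10, total: 100 })
+//   e.loaded           // 10
+//   e.total            // 100
+//   e.lengthComputable // false (default)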
+
+
+/***/ }),
+
+/***/ 9054:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ kState: Symbol('FileReader state'),
+ kResult: Symbol('FileReader result'),
+ kError: Symbol('FileReader error'),
+ kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),
+ kEvents: Symbol('FileReader events'),
+ kAborted: Symbol('FileReader aborted')
+}
+
+
+/***/ }),
+
+/***/ 7530:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ kState,
+ kError,
+ kResult,
+ kAborted,
+ kLastProgressEventFired
+} = __nccwpck_require__(9054)
+const { ProgressEvent } = __nccwpck_require__(5504)
+const { getEncoding } = __nccwpck_require__(4854)
+const { DOMException } = __nccwpck_require__(1037)
+const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(685)
+const { types } = __nccwpck_require__(3837)
+const { StringDecoder } = __nccwpck_require__(1576)
+const { btoa } = __nccwpck_require__(4300)
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+ enumerable: true,
+ writable: false,
+ configurable: false
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#readOperation
+ * @param {import('./filereader').FileReader} fr
+ * @param {import('buffer').Blob} blob
+ * @param {string} type
+ * @param {string?} encodingName
+ */
+function readOperation (fr, blob, type, encodingName) {
+ // 1. If fr’s state is "loading", throw an InvalidStateError
+ // DOMException.
+ if (fr[kState] === 'loading') {
+ throw new DOMException('Invalid state', 'InvalidStateError')
+ }
+
+ // 2. Set fr’s state to "loading".
+ fr[kState] = 'loading'
+
+ // 3. Set fr’s result to null.
+ fr[kResult] = null
+
+ // 4. Set fr’s error to null.
+ fr[kError] = null
+
+ // 5. Let stream be the result of calling get stream on blob.
+ /** @type {import('stream/web').ReadableStream} */
+ const stream = blob.stream()
+
+ // 6. Let reader be the result of getting a reader from stream.
+ const reader = stream.getReader()
+
+ // 7. Let bytes be an empty byte sequence.
+ /** @type {Uint8Array[]} */
+ const bytes = []
+
+ // 8. Let chunkPromise be the result of reading a chunk from
+ // stream with reader.
+ let chunkPromise = reader.read()
+
+ // 9. Let isFirstChunk be true.
+ let isFirstChunk = true
+
+ // 10. In parallel, while true:
+ // Note: "In parallel" just means non-blocking
+ // Note 2: readOperation itself cannot be async as double
+ // reading the body would then reject the promise, instead
+ // of throwing an error.
+ ;(async () => {
+ while (!fr[kAborted]) {
+ // 1. Wait for chunkPromise to be fulfilled or rejected.
+ try {
+ const { done, value } = await chunkPromise
+
+ // 2. If chunkPromise is fulfilled, and isFirstChunk is
+ // true, queue a task to fire a progress event called
+ // loadstart at fr.
+ if (isFirstChunk && !fr[kAborted]) {
+ queueMicrotask(() => {
+ fireAProgressEvent('loadstart', fr)
+ })
+ }
+
+ // 3. Set isFirstChunk to false.
+ isFirstChunk = false
+
+ // 4. If chunkPromise is fulfilled with an object whose
+ // done property is false and whose value property is
+ // a Uint8Array object, run these steps:
+ if (!done && types.isUint8Array(value)) {
+ // 1. Let bs be the byte sequence represented by the
+ // Uint8Array object.
+
+ // 2. Append bs to bytes.
+ bytes.push(value)
+
+ // 3. If roughly 50ms have passed since these steps
+ // were last invoked, queue a task to fire a
+ // progress event called progress at fr.
+ if (
+ (
+ fr[kLastProgressEventFired] === undefined ||
+ Date.now() - fr[kLastProgressEventFired] >= 50
+ ) &&
+ !fr[kAborted]
+ ) {
+ fr[kLastProgressEventFired] = Date.now()
+ queueMicrotask(() => {
+ fireAProgressEvent('progress', fr)
+ })
+ }
+
+ // 4. Set chunkPromise to the result of reading a
+ // chunk from stream with reader.
+ chunkPromise = reader.read()
+ } else if (done) {
+ // 5. Otherwise, if chunkPromise is fulfilled with an
+ // object whose done property is true, queue a task
+ // to run the following steps and abort this algorithm:
+ queueMicrotask(() => {
+ // 1. Set fr’s state to "done".
+ fr[kState] = 'done'
+
+ // 2. Let result be the result of package data given
+ // bytes, type, blob’s type, and encodingName.
+ try {
+ const result = packageData(bytes, type, blob.type, encodingName)
+
+ // 4. Else:
+
+ if (fr[kAborted]) {
+ return
+ }
+
+ // 1. Set fr’s result to result.
+ fr[kResult] = result
+
+ // 2. Fire a progress event called load at the fr.
+ fireAProgressEvent('load', fr)
+ } catch (error) {
+ // 3. If package data threw an exception error:
+
+ // 1. Set fr’s error to error.
+ fr[kError] = error
+
+ // 2. Fire a progress event called error at fr.
+ fireAProgressEvent('error', fr)
+ }
+
+ // 5. If fr’s state is not "loading", fire a progress
+ // event called loadend at the fr.
+ if (fr[kState] !== 'loading') {
+ fireAProgressEvent('loadend', fr)
+ }
+ })
+
+ break
+ }
+ } catch (error) {
+ if (fr[kAborted]) {
+ return
+ }
+
+ // 6. Otherwise, if chunkPromise is rejected with an
+ // error error, queue a task to run the following
+ // steps and abort this algorithm:
+ queueMicrotask(() => {
+ // 1. Set fr’s state to "done".
+ fr[kState] = 'done'
+
+ // 2. Set fr’s error to error.
+ fr[kError] = error
+
+ // 3. Fire a progress event called error at fr.
+ fireAProgressEvent('error', fr)
+
+ // 4. If fr’s state is not "loading", fire a progress
+ // event called loadend at fr.
+ if (fr[kState] !== 'loading') {
+ fireAProgressEvent('loadend', fr)
+ }
+ })
+
+ break
+ }
+ }
+ })()
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e The name of the event
+ * @param {import('./filereader').FileReader} reader
+ */
+function fireAProgressEvent (e, reader) {
+ // The progress event e does not bubble. e.bubbles must be false
+ // The progress event e is NOT cancelable. e.cancelable must be false
+ const event = new ProgressEvent(e, {
+ bubbles: false,
+ cancelable: false
+ })
+
+ reader.dispatchEvent(event)
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#blob-package-data
+ * @param {Uint8Array[]} bytes
+ * @param {string} type
+ * @param {string?} mimeType
+ * @param {string?} encodingName
+ */
+function packageData (bytes, type, mimeType, encodingName) {
+ // 1. A Blob has an associated package data algorithm, given
+  //    bytes, a type, an optional mimeType, and an optional
+ // encodingName, which switches on type and runs the
+ // associated steps:
+
+ switch (type) {
+ case 'DataURL': {
+ // 1. Return bytes as a DataURL [RFC2397] subject to
+ // the considerations below:
+ // * Use mimeType as part of the Data URL if it is
+ // available in keeping with the Data URL
+ // specification [RFC2397].
+ // * If mimeType is not available return a Data URL
+ // without a media-type. [RFC2397].
+
+ // https://datatracker.ietf.org/doc/html/rfc2397#section-3
+ // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
+ // mediatype := [ type "/" subtype ] *( ";" parameter )
+ // data := *urlchar
+ // parameter := attribute "=" value
+ let dataURL = 'data:'
+
+ const parsed = parseMIMEType(mimeType || 'application/octet-stream')
+
+ if (parsed !== 'failure') {
+ dataURL += serializeAMimeType(parsed)
+ }
+
+ dataURL += ';base64,'
+
+ const decoder = new StringDecoder('latin1')
+
+ for (const chunk of bytes) {
+ dataURL += btoa(decoder.write(chunk))
+ }
+
+ dataURL += btoa(decoder.end())
+
+ return dataURL
+ }
+ case 'Text': {
+ // 1. Let encoding be failure
+ let encoding = 'failure'
+
+ // 2. If the encodingName is present, set encoding to the
+ // result of getting an encoding from encodingName.
+ if (encodingName) {
+ encoding = getEncoding(encodingName)
+ }
+
+ // 3. If encoding is failure, and mimeType is present:
+ if (encoding === 'failure' && mimeType) {
+ // 1. Let type be the result of parse a MIME type
+ // given mimeType.
+ const type = parseMIMEType(mimeType)
+
+ // 2. If type is not failure, set encoding to the result
+ // of getting an encoding from type’s parameters["charset"].
+ if (type !== 'failure') {
+ encoding = getEncoding(type.parameters.get('charset'))
+ }
+ }
+
+ // 4. If encoding is failure, then set encoding to UTF-8.
+ if (encoding === 'failure') {
+ encoding = 'UTF-8'
+ }
+
+ // 5. Decode bytes using fallback encoding encoding, and
+ // return the result.
+ return decode(bytes, encoding)
+ }
+ case 'ArrayBuffer': {
+ // Return a new ArrayBuffer whose contents are bytes.
+ const sequence = combineByteSequences(bytes)
+
+ return sequence.buffer
+ }
+ case 'BinaryString': {
+ // Return bytes as a binary string, in which every byte
+ // is represented by a code unit of equal value [0..255].
+ let binaryString = ''
+
+ const decoder = new StringDecoder('latin1')
+
+ for (const chunk of bytes) {
+ binaryString += decoder.write(chunk)
+ }
+
+ binaryString += decoder.end()
+
+ return binaryString
+ }
+ }
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#decode
+ * @param {Uint8Array[]} ioQueue
+ * @param {string} encoding
+ */
+function decode (ioQueue, encoding) {
+ const bytes = combineByteSequences(ioQueue)
+
+ // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.
+ const BOMEncoding = BOMSniffing(bytes)
+
+ let slice = 0
+
+ // 2. If BOMEncoding is non-null:
+ if (BOMEncoding !== null) {
+ // 1. Set encoding to BOMEncoding.
+ encoding = BOMEncoding
+
+ // 2. Read three bytes from ioQueue, if BOMEncoding is
+ // UTF-8; otherwise read two bytes.
+ // (Do nothing with those bytes.)
+ slice = BOMEncoding === 'UTF-8' ? 3 : 2
+ }
+
+ // 3. Process a queue with an instance of encoding’s
+ // decoder, ioQueue, output, and "replacement".
+
+ // 4. Return output.
+
+ const sliced = bytes.slice(slice)
+ return new TextDecoder(encoding).decode(sliced)
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#bom-sniff
+ * @param {Uint8Array} ioQueue
+ */
+function BOMSniffing (ioQueue) {
+ // 1. Let BOM be the result of peeking 3 bytes from ioQueue,
+ // converted to a byte sequence.
+ const [a, b, c] = ioQueue
+
+ // 2. For each of the rows in the table below, starting with
+ // the first one and going down, if BOM starts with the
+ // bytes given in the first column, then return the
+ // encoding given in the cell in the second column of that
+ // row. Otherwise, return null.
+ if (a === 0xEF && b === 0xBB && c === 0xBF) {
+ return 'UTF-8'
+ } else if (a === 0xFE && b === 0xFF) {
+ return 'UTF-16BE'
+ } else if (a === 0xFF && b === 0xFE) {
+ return 'UTF-16LE'
+ }
+
+ return null
+}
+
+/**
+ * @param {Uint8Array[]} sequences
+ */
+function combineByteSequences (sequences) {
+ const size = sequences.reduce((a, b) => {
+ return a + b.byteLength
+ }, 0)
+
+ let offset = 0
+
+ return sequences.reduce((a, b) => {
+ a.set(b, offset)
+ offset += b.byteLength
+ return a
+ }, new Uint8Array(size))
+}
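+
+// Illustrative example, comment only: the chunks are copied, in order, into a
+// single contiguous Uint8Array sized to their combined byteLength.
+//
+//   combineByteSequences([Uint8Array.of(1, 2), Uint8Array.of(3)])
+//   // => Uint8Array(3) [ 1, 2, 3 ]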
+
+module.exports = {
+ staticPropertyDescriptors,
+ readOperation,
+ fireAProgressEvent
+}
+
+
+/***/ }),
+
+/***/ 1892:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+// We include a version number for the Dispatcher API. In case of breaking changes,
+// this version number must be increased to avoid conflicts.
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+const Agent = __nccwpck_require__(7890)
+
+if (getGlobalDispatcher() === undefined) {
+ setGlobalDispatcher(new Agent())
+}
+
+function setGlobalDispatcher (agent) {
+ if (!agent || typeof agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument agent must implement Agent')
+ }
+ Object.defineProperty(globalThis, globalDispatcher, {
+ value: agent,
+ writable: true,
+ enumerable: false,
+ configurable: false
+ })
+}
+
+function getGlobalDispatcher () {
+ return globalThis[globalDispatcher]
+}
+
+module.exports = {
+ setGlobalDispatcher,
+ getGlobalDispatcher
+}
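+
+// Minimal usage sketch, comment only, using the Agent required above:
+//
+//   const agent = new Agent()
+//   setGlobalDispatcher(agent)
+//   getGlobalDispatcher() === agent // true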
+
+
+/***/ }),
+
+/***/ 6930:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = class DecoratorHandler {
+ constructor (handler) {
+ this.handler = handler
+ }
+
+ onConnect (...args) {
+ return this.handler.onConnect(...args)
+ }
+
+ onError (...args) {
+ return this.handler.onError(...args)
+ }
+
+ onUpgrade (...args) {
+ return this.handler.onUpgrade(...args)
+ }
+
+ onHeaders (...args) {
+ return this.handler.onHeaders(...args)
+ }
+
+ onData (...args) {
+ return this.handler.onData(...args)
+ }
+
+ onComplete (...args) {
+ return this.handler.onComplete(...args)
+ }
+
+ onBodySent (...args) {
+ return this.handler.onBodySent(...args)
+ }
+}
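+
+// Illustrative sketch, comment only, assuming DecoratorHandler refers to the
+// class exported above: subclass it and override only the hooks you care
+// about; every other callback is forwarded to the wrapped handler unchanged.
+//
+//   class LoggingHandler extends DecoratorHandler {
+//     onHeaders (statusCode, headers, resume, statusText) {
+//       console.log('status:', statusCode)
+//       return super.onHeaders(statusCode, headers, resume, statusText)
+//     }
+//   }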
+
+
+/***/ }),
+
+/***/ 2860:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const util = __nccwpck_require__(3983)
+const { kBodyUsed } = __nccwpck_require__(2785)
+const assert = __nccwpck_require__(9491)
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+const EE = __nccwpck_require__(2361)
+
+const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
+
+const kBody = Symbol('body')
+
+class BodyAsyncIterable {
+ constructor (body) {
+ this[kBody] = body
+ this[kBodyUsed] = false
+ }
+
+ async * [Symbol.asyncIterator] () {
+ assert(!this[kBodyUsed], 'disturbed')
+ this[kBodyUsed] = true
+ yield * this[kBody]
+ }
+}
+
+class RedirectHandler {
+ constructor (dispatch, maxRedirections, opts, handler) {
+ if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+ throw new InvalidArgumentError('maxRedirections must be a positive number')
+ }
+
+ util.validateHandler(handler, opts.method, opts.upgrade)
+
+ this.dispatch = dispatch
+ this.location = null
+ this.abort = null
+ this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
+ this.maxRedirections = maxRedirections
+ this.handler = handler
+ this.history = []
+
+ if (util.isStream(this.opts.body)) {
+ // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
+ // so that it can be dispatched again?
+ // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
+ if (util.bodyLength(this.opts.body) === 0) {
+ this.opts.body
+ .on('data', function () {
+ assert(false)
+ })
+ }
+
+ if (typeof this.opts.body.readableDidRead !== 'boolean') {
+ this.opts.body[kBodyUsed] = false
+ EE.prototype.on.call(this.opts.body, 'data', function () {
+ this[kBodyUsed] = true
+ })
+ }
+ } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
+ // TODO (fix): We can't access ReadableStream internal state
+ // to determine whether or not it has been disturbed. This is just
+ // a workaround.
+ this.opts.body = new BodyAsyncIterable(this.opts.body)
+ } else if (
+ this.opts.body &&
+ typeof this.opts.body !== 'string' &&
+ !ArrayBuffer.isView(this.opts.body) &&
+ util.isIterable(this.opts.body)
+ ) {
+ // TODO: Should we allow re-using iterable if !this.opts.idempotent
+ // or through some other flag?
+ this.opts.body = new BodyAsyncIterable(this.opts.body)
+ }
+ }
+
+ onConnect (abort) {
+ this.abort = abort
+ this.handler.onConnect(abort, { history: this.history })
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ this.handler.onUpgrade(statusCode, headers, socket)
+ }
+
+ onError (error) {
+ this.handler.onError(error)
+ }
+
+ onHeaders (statusCode, headers, resume, statusText) {
+ this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
+ ? null
+ : parseLocation(statusCode, headers)
+
+ if (this.opts.origin) {
+ this.history.push(new URL(this.opts.path, this.opts.origin))
+ }
+
+ if (!this.location) {
+ return this.handler.onHeaders(statusCode, headers, resume, statusText)
+ }
+
+ const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
+ const path = search ? `${pathname}${search}` : pathname
+
+ // Remove headers referring to the original URL.
+ // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers.
+ // https://tools.ietf.org/html/rfc7231#section-6.4
+ this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
+ this.opts.path = path
+ this.opts.origin = origin
+ this.opts.maxRedirections = 0
+ this.opts.query = null
+
+ // https://tools.ietf.org/html/rfc7231#section-6.4.4
+ // In case of HTTP 303, always replace method to be either HEAD or GET
+ if (statusCode === 303 && this.opts.method !== 'HEAD') {
+ this.opts.method = 'GET'
+ this.opts.body = null
+ }
+ }
+
+ onData (chunk) {
+ if (this.location) {
+ /*
+ https://tools.ietf.org/html/rfc7231#section-6.4
+
+ TLDR: undici always ignores 3xx response bodies.
+
+      Redirection is used to serve the requested resource from another URL, so it is assumed that
+      no body is generated (and thus can be ignored), even though generating a body is not prohibited.
+
+      For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually
+      (which means it's optional and not mandated) contains just a hyperlink to the value of
+      the Location response header, so the body can be ignored safely.
+
+      For status 300, which is "Multiple Choices", the spec mentions both generating a Location
+      response header AND a response body with the other possible locations to follow.
+      Since the spec explicitly chooses not to specify a format for such a body and leaves it to
+      server and browser implementors, we ignore the body as there is no specified way to parse it.
+ */
+ } else {
+ return this.handler.onData(chunk)
+ }
+ }
+
+ onComplete (trailers) {
+ if (this.location) {
+ /*
+ https://tools.ietf.org/html/rfc7231#section-6.4
+
+      TLDR: undici always ignores 3xx response trailers, as they are not expected in case of redirections
+      and are not useful even if present.
+
+      See the comment on the onData method above for more detailed information.
+ */
+
+ this.location = null
+ this.abort = null
+
+ this.dispatch(this.opts, this)
+ } else {
+ this.handler.onComplete(trailers)
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this.handler.onBodySent) {
+ this.handler.onBodySent(chunk)
+ }
+ }
+}
+
+function parseLocation (statusCode, headers) {
+ if (redirectableStatusCodes.indexOf(statusCode) === -1) {
+ return null
+ }
+
+ for (let i = 0; i < headers.length; i += 2) {
+ if (headers[i].toString().toLowerCase() === 'location') {
+ return headers[i + 1]
+ }
+ }
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4.4
+function shouldRemoveHeader (header, removeContent, unknownOrigin) {
+ return (
+ (header.length === 4 && header.toString().toLowerCase() === 'host') ||
+ (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
+ (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
+ (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
+ )
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4
+function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
+ const ret = []
+ if (Array.isArray(headers)) {
+ for (let i = 0; i < headers.length; i += 2) {
+ if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
+ ret.push(headers[i], headers[i + 1])
+ }
+ }
+ } else if (headers && typeof headers === 'object') {
+ for (const key of Object.keys(headers)) {
+ if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
+ ret.push(key, headers[key])
+ }
+ }
+ } else {
+ assert(headers == null, 'headers must be an object or an array')
+ }
+ return ret
+}
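+
+// Illustrative example, comment only: on a cross-origin 303 redirect both
+// flags are true, so Host, Authorization, Cookie and all Content-* headers
+// are stripped before the request is re-dispatched.
+//
+//   cleanRequestHeaders(
+//     ['host', 'a.example', 'content-type', 'text/plain', 'accept', '*/*'],
+//     true, // removeContent (status 303)
+//     true  // unknownOrigin (different origin)
+//   )
+//   // => ['accept', '*/*']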
+
+module.exports = RedirectHandler
+
+
+/***/ }),
+
+/***/ 2286:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(9491)
+
+const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785)
+const { RequestRetryError } = __nccwpck_require__(8045)
+const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983)
+
+function calculateRetryAfterHeader (retryAfter) {
+ const current = Date.now()
+ const diff = new Date(retryAfter).getTime() - current
+
+ return diff
+}
+
+class RetryHandler {
+ constructor (opts, handlers) {
+ const { retryOptions, ...dispatchOpts } = opts
+ const {
+ // Retry scoped
+ retry: retryFn,
+ maxRetries,
+ maxTimeout,
+ minTimeout,
+ timeoutFactor,
+ // Response scoped
+ methods,
+ errorCodes,
+ retryAfter,
+ statusCodes
+ } = retryOptions ?? {}
+
+ this.dispatch = handlers.dispatch
+ this.handler = handlers.handler
+ this.opts = dispatchOpts
+ this.abort = null
+ this.aborted = false
+ this.retryOpts = {
+ retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
+ retryAfter: retryAfter ?? true,
+ maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
+ timeout: minTimeout ?? 500, // .5s
+ timeoutFactor: timeoutFactor ?? 2,
+ maxRetries: maxRetries ?? 5,
+      // Which HTTP methods are safe to retry
+      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
+      // Which response status codes should trigger a retry
+      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
+      // Which error codes should trigger a retry
+ errorCodes: errorCodes ?? [
+ 'ECONNRESET',
+ 'ECONNREFUSED',
+ 'ENOTFOUND',
+ 'ENETDOWN',
+ 'ENETUNREACH',
+ 'EHOSTDOWN',
+ 'EHOSTUNREACH',
+ 'EPIPE'
+ ]
+ }
+
+ this.retryCount = 0
+ this.start = 0
+ this.end = null
+ this.etag = null
+ this.resume = null
+
+ // Handle possible onConnect duplication
+ this.handler.onConnect(reason => {
+ this.aborted = true
+ if (this.abort) {
+ this.abort(reason)
+ } else {
+ this.reason = reason
+ }
+ })
+ }
+
+ onRequestSent () {
+ if (this.handler.onRequestSent) {
+ this.handler.onRequestSent()
+ }
+ }
+
+ onUpgrade (statusCode, headers, socket) {
+ if (this.handler.onUpgrade) {
+ this.handler.onUpgrade(statusCode, headers, socket)
+ }
+ }
+
+ onConnect (abort) {
+ if (this.aborted) {
+ abort(this.reason)
+ } else {
+ this.abort = abort
+ }
+ }
+
+ onBodySent (chunk) {
+ if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
+ }
+
+ static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
+ const { statusCode, code, headers } = err
+ const { method, retryOptions } = opts
+ const {
+ maxRetries,
+ timeout,
+ maxTimeout,
+ timeoutFactor,
+ statusCodes,
+ errorCodes,
+ methods
+ } = retryOptions
+ let { counter, currentTimeout } = state
+
+ currentTimeout =
+ currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout
+
+    // Bail out if the error code is present, is not one of undici's own codes,
+    // and is not in the list of error codes allowed to retry
+ if (
+ code &&
+ code !== 'UND_ERR_REQ_RETRY' &&
+ code !== 'UND_ERR_SOCKET' &&
+ !errorCodes.includes(code)
+ ) {
+ cb(err)
+ return
+ }
+
+    // If a set of methods is provided and the current method is not in the list
+ if (Array.isArray(methods) && !methods.includes(method)) {
+ cb(err)
+ return
+ }
+
+    // If a set of status codes is provided and the current status code is not in the list
+ if (
+ statusCode != null &&
+ Array.isArray(statusCodes) &&
+ !statusCodes.includes(statusCode)
+ ) {
+ cb(err)
+ return
+ }
+
+ // If we reached the max number of retries
+ if (counter > maxRetries) {
+ cb(err)
+ return
+ }
+
+ let retryAfterHeader = headers != null && headers['retry-after']
+ if (retryAfterHeader) {
+ retryAfterHeader = Number(retryAfterHeader)
+ retryAfterHeader = isNaN(retryAfterHeader)
+ ? calculateRetryAfterHeader(retryAfterHeader)
+ : retryAfterHeader * 1e3 // Retry-After is in seconds
+ }
+
+ const retryTimeout =
+ retryAfterHeader > 0
+ ? Math.min(retryAfterHeader, maxTimeout)
+ : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)
+
+ state.currentTimeout = retryTimeout
+
+ setTimeout(() => cb(null), retryTimeout)
+ }
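+
+  // Worked example, comment only: with the defaults above (timeout 500 ms,
+  // timeoutFactor 2, maxTimeout 30 s), no usable Retry-After header, and
+  // currentTimeout at its 500 ms floor, the delay is
+  // min(500 * 2 ** counter, 30000) ms: counter 0 waits 500 ms, counter 1
+  // waits 1000 ms, counter 2 waits 2000 ms, and it never exceeds 30 s.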
+
+ onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+ const headers = parseHeaders(rawHeaders)
+
+ this.retryCount += 1
+
+ if (statusCode >= 300) {
+ this.abort(
+ new RequestRetryError('Request failed', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ // Checkpoint for resume from where we left it
+ if (this.resume != null) {
+ this.resume = null
+
+ if (statusCode !== 206) {
+ return true
+ }
+
+ const contentRange = parseRangeHeader(headers['content-range'])
+ // If no content range
+ if (!contentRange) {
+ this.abort(
+ new RequestRetryError('Content-Range mismatch', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ // Let's start with a weak etag check
+ if (this.etag != null && this.etag !== headers.etag) {
+ this.abort(
+ new RequestRetryError('ETag mismatch', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+ )
+ return false
+ }
+
+ const { start, size, end = size } = contentRange
+
+ assert(this.start === start, 'content-range mismatch')
+ assert(this.end == null || this.end === end, 'content-range mismatch')
+
+ this.resume = resume
+ return true
+ }
+
+ if (this.end == null) {
+ if (statusCode === 206) {
+ // First time we receive 206
+ const range = parseRangeHeader(headers['content-range'])
+
+ if (range == null) {
+ return this.handler.onHeaders(
+ statusCode,
+ rawHeaders,
+ resume,
+ statusMessage
+ )
+ }
+
+ const { start, size, end = size } = range
+
+ assert(
+ start != null && Number.isFinite(start) && this.start !== start,
+ 'content-range mismatch'
+ )
+ assert(Number.isFinite(start))
+ assert(
+ end != null && Number.isFinite(end) && this.end !== end,
+ 'invalid content-length'
+ )
+
+ this.start = start
+ this.end = end
+ }
+
+      // Do our best to record the end of the body so later retries can send Range headers
+ if (this.end == null) {
+ const contentLength = headers['content-length']
+ this.end = contentLength != null ? Number(contentLength) : null
+ }
+
+ assert(Number.isFinite(this.start))
+ assert(
+ this.end == null || Number.isFinite(this.end),
+ 'invalid content-length'
+ )
+
+ this.resume = resume
+ this.etag = headers.etag != null ? headers.etag : null
+
+ return this.handler.onHeaders(
+ statusCode,
+ rawHeaders,
+ resume,
+ statusMessage
+ )
+ }
+
+ const err = new RequestRetryError('Request failed', statusCode, {
+ headers,
+ count: this.retryCount
+ })
+
+ this.abort(err)
+
+ return false
+ }
+
+ onData (chunk) {
+ this.start += chunk.length
+
+ return this.handler.onData(chunk)
+ }
+
+ onComplete (rawTrailers) {
+ this.retryCount = 0
+ return this.handler.onComplete(rawTrailers)
+ }
+
+ onError (err) {
+ if (this.aborted || isDisturbed(this.opts.body)) {
+ return this.handler.onError(err)
+ }
+
+ this.retryOpts.retry(
+ err,
+ {
+ state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
+ opts: { retryOptions: this.retryOpts, ...this.opts }
+ },
+ onRetry.bind(this)
+ )
+
+ function onRetry (err) {
+ if (err != null || this.aborted || isDisturbed(this.opts.body)) {
+ return this.handler.onError(err)
+ }
+
+ if (this.start !== 0) {
+ this.opts = {
+ ...this.opts,
+ headers: {
+ ...this.opts.headers,
+ range: `bytes=${this.start}-${this.end ?? ''}`
+ }
+ }
+ }
+
+ try {
+ this.dispatch(this.opts, this)
+ } catch (err) {
+ this.handler.onError(err)
+ }
+ }
+ }
+}
+
+module.exports = RetryHandler
+
+
+/***/ }),
+
+/***/ 8861:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const RedirectHandler = __nccwpck_require__(2860)
+
+function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
+ return (dispatch) => {
+ return function Intercept (opts, handler) {
+ const { maxRedirections = defaultMaxRedirections } = opts
+
+ if (!maxRedirections) {
+ return dispatch(opts, handler)
+ }
+
+ const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
+ opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
+ return dispatch(opts, redirectHandler)
+ }
+ }
+}
+
+module.exports = createRedirectInterceptor
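+
+// Illustrative sketch, comment only; dispatcher and handler are placeholders
+// for an existing undici dispatcher and handler. The factory wraps a dispatch
+// function so that requests opting into redirects are routed through a
+// RedirectHandler.
+//
+//   const intercept = createRedirectInterceptor({ maxRedirections: 3 })
+//   const dispatch = intercept(dispatcher.dispatch.bind(dispatcher))
+//   dispatch({ origin: 'http://example.com', method: 'GET', path: '/' }, handler)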
+
+
+/***/ }),
+
+/***/ 953:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
+const utils_1 = __nccwpck_require__(1891);
+// C headers
+var ERROR;
+(function (ERROR) {
+ ERROR[ERROR["OK"] = 0] = "OK";
+ ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
+ ERROR[ERROR["STRICT"] = 2] = "STRICT";
+ ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
+ ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
+ ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
+ ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
+ ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
+ ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
+ ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
+ ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
+ ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
+ ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
+ ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
+ ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
+ ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
+ ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
+ ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
+ ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
+ ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
+ ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
+ ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
+ ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
+ ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
+ ERROR[ERROR["USER"] = 24] = "USER";
+})(ERROR = exports.ERROR || (exports.ERROR = {}));
+var TYPE;
+(function (TYPE) {
+ TYPE[TYPE["BOTH"] = 0] = "BOTH";
+ TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
+ TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
+})(TYPE = exports.TYPE || (exports.TYPE = {}));
+var FLAGS;
+(function (FLAGS) {
+ FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
+ FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
+ FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
+ FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
+ FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
+ FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
+ FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
+ FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
+ // 1 << 8 is unused
+ FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
+})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
+var LENIENT_FLAGS;
+(function (LENIENT_FLAGS) {
+ LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
+ LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
+ LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
+})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
+var METHODS;
+(function (METHODS) {
+ METHODS[METHODS["DELETE"] = 0] = "DELETE";
+ METHODS[METHODS["GET"] = 1] = "GET";
+ METHODS[METHODS["HEAD"] = 2] = "HEAD";
+ METHODS[METHODS["POST"] = 3] = "POST";
+ METHODS[METHODS["PUT"] = 4] = "PUT";
+ /* pathological */
+ METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
+ METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
+ METHODS[METHODS["TRACE"] = 7] = "TRACE";
+ /* WebDAV */
+ METHODS[METHODS["COPY"] = 8] = "COPY";
+ METHODS[METHODS["LOCK"] = 9] = "LOCK";
+ METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
+ METHODS[METHODS["MOVE"] = 11] = "MOVE";
+ METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
+ METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
+ METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
+ METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
+ METHODS[METHODS["BIND"] = 16] = "BIND";
+ METHODS[METHODS["REBIND"] = 17] = "REBIND";
+ METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
+ METHODS[METHODS["ACL"] = 19] = "ACL";
+ /* subversion */
+ METHODS[METHODS["REPORT"] = 20] = "REPORT";
+ METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
+ METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
+ METHODS[METHODS["MERGE"] = 23] = "MERGE";
+ /* upnp */
+ METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
+ METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
+ METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
+ METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
+ /* RFC-5789 */
+ METHODS[METHODS["PATCH"] = 28] = "PATCH";
+ METHODS[METHODS["PURGE"] = 29] = "PURGE";
+ /* CalDAV */
+ METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
+ /* RFC-2068, section 19.6.1.2 */
+ METHODS[METHODS["LINK"] = 31] = "LINK";
+ METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
+ /* icecast */
+ METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
+ /* RFC-7540, section 11.6 */
+ METHODS[METHODS["PRI"] = 34] = "PRI";
+ /* RFC-2326 RTSP */
+ METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
+ METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
+ METHODS[METHODS["SETUP"] = 37] = "SETUP";
+ METHODS[METHODS["PLAY"] = 38] = "PLAY";
+ METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
+ METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
+ METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
+ METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
+ METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
+ METHODS[METHODS["RECORD"] = 44] = "RECORD";
+ /* RAOP */
+ METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
+})(METHODS = exports.METHODS || (exports.METHODS = {}));
+exports.METHODS_HTTP = [
+ METHODS.DELETE,
+ METHODS.GET,
+ METHODS.HEAD,
+ METHODS.POST,
+ METHODS.PUT,
+ METHODS.CONNECT,
+ METHODS.OPTIONS,
+ METHODS.TRACE,
+ METHODS.COPY,
+ METHODS.LOCK,
+ METHODS.MKCOL,
+ METHODS.MOVE,
+ METHODS.PROPFIND,
+ METHODS.PROPPATCH,
+ METHODS.SEARCH,
+ METHODS.UNLOCK,
+ METHODS.BIND,
+ METHODS.REBIND,
+ METHODS.UNBIND,
+ METHODS.ACL,
+ METHODS.REPORT,
+ METHODS.MKACTIVITY,
+ METHODS.CHECKOUT,
+ METHODS.MERGE,
+ METHODS['M-SEARCH'],
+ METHODS.NOTIFY,
+ METHODS.SUBSCRIBE,
+ METHODS.UNSUBSCRIBE,
+ METHODS.PATCH,
+ METHODS.PURGE,
+ METHODS.MKCALENDAR,
+ METHODS.LINK,
+ METHODS.UNLINK,
+ METHODS.PRI,
+ // TODO(indutny): should we allow it with HTTP?
+ METHODS.SOURCE,
+];
+exports.METHODS_ICE = [
+ METHODS.SOURCE,
+];
+exports.METHODS_RTSP = [
+ METHODS.OPTIONS,
+ METHODS.DESCRIBE,
+ METHODS.ANNOUNCE,
+ METHODS.SETUP,
+ METHODS.PLAY,
+ METHODS.PAUSE,
+ METHODS.TEARDOWN,
+ METHODS.GET_PARAMETER,
+ METHODS.SET_PARAMETER,
+ METHODS.REDIRECT,
+ METHODS.RECORD,
+ METHODS.FLUSH,
+ // For AirPlay
+ METHODS.GET,
+ METHODS.POST,
+];
+exports.METHOD_MAP = utils_1.enumToMap(METHODS);
+exports.H_METHOD_MAP = {};
+Object.keys(exports.METHOD_MAP).forEach((key) => {
+ if (/^H/.test(key)) {
+ exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
+ }
+});
+var FINISH;
+(function (FINISH) {
+ FINISH[FINISH["SAFE"] = 0] = "SAFE";
+ FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
+ FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
+})(FINISH = exports.FINISH || (exports.FINISH = {}));
+exports.ALPHA = [];
+for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
+ // Upper case
+ exports.ALPHA.push(String.fromCharCode(i));
+ // Lower case
+ exports.ALPHA.push(String.fromCharCode(i + 0x20));
+}
+exports.NUM_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+};
+exports.HEX_MAP = {
+ 0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+ 5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+ A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
+ a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
+};
+exports.NUM = [
+ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+];
+exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
+exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
+exports.USERINFO_CHARS = exports.ALPHANUM
+ .concat(exports.MARK)
+ .concat(['%', ';', ':', '&', '=', '+', '$', ',']);
+// TODO(indutny): use RFC
+exports.STRICT_URL_CHAR = [
+ '!', '"', '$', '%', '&', '\'',
+ '(', ')', '*', '+', ',', '-', '.', '/',
+ ':', ';', '<', '=', '>',
+ '@', '[', '\\', ']', '^', '_',
+ '`',
+ '{', '|', '}', '~',
+].concat(exports.ALPHANUM);
+exports.URL_CHAR = exports.STRICT_URL_CHAR
+ .concat(['\t', '\f']);
+// All characters with 0x80 bit set to 1
+for (let i = 0x80; i <= 0xff; i++) {
+ exports.URL_CHAR.push(i);
+}
+exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
+/* Tokens as defined by rfc 2616. Also lowercases them.
+ *        token       = 1*<any CHAR except CTLs or separators>
+ * separators = "(" | ")" | "<" | ">" | "@"
+ * | "," | ";" | ":" | "\" | <">
+ * | "/" | "[" | "]" | "?" | "="
+ * | "{" | "}" | SP | HT
+ */
+exports.STRICT_TOKEN = [
+ '!', '#', '$', '%', '&', '\'',
+ '*', '+', '-', '.',
+ '^', '_', '`',
+ '|', '~',
+].concat(exports.ALPHANUM);
+exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
+/*
+ * Verify that a char is a valid visible (printable) US-ASCII
+ * character or %x80-FF
+ */
+exports.HEADER_CHARS = ['\t'];
+for (let i = 32; i <= 255; i++) {
+ if (i !== 127) {
+ exports.HEADER_CHARS.push(i);
+ }
+}
+// ',' has char code 44 (0x2C)
+exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
+exports.MAJOR = exports.NUM_MAP;
+exports.MINOR = exports.MAJOR;
+var HEADER_STATE;
+(function (HEADER_STATE) {
+ HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
+ HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
+ HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
+ HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
+ HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
+ HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
+ HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
+})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
+exports.SPECIAL_HEADERS = {
+ 'connection': HEADER_STATE.CONNECTION,
+ 'content-length': HEADER_STATE.CONTENT_LENGTH,
+ 'proxy-connection': HEADER_STATE.CONNECTION,
+ 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
+ 'upgrade': HEADER_STATE.UPGRADE,
+};
+//# sourceMappingURL=constants.js.map
+
+/***/ }),
+
+/***/ 1145:
+/***/ ((module) => {
+
+module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtB
zK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0
AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBg
tBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgA
UEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYN
mgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0
AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC
0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAq
gELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAg
AUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAA
iFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgAC
AOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2A
hQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2
AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0IC
AAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIA
MhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxa
kEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAi
BEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGl
kIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0
hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8='
+
+
+/***/ }),
+
+/***/ 5627:
+/***/ ((module) => {
+
+module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtB
zK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0
AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBA
tBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohA
QtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWoh
FiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiE
BC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw
3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMq
AILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEg
AUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACA
QNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCy
AAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBB
zYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYC
HCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0G
AAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQ
gMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGI
AQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEA
IAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGN
odW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTk
FNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw=='
+
+
+/***/ }),
+
+/***/ 1891:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.enumToMap = void 0;
+function enumToMap(obj) {
+ const res = {};
+ Object.keys(obj).forEach((key) => {
+ const value = obj[key];
+ if (typeof value === 'number') {
+ res[key] = value;
+ }
+ });
+ return res;
+}
+exports.enumToMap = enumToMap;
+//# sourceMappingURL=utils.js.map
+
+/***/ }),
+
+/***/ 6771:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { kClients } = __nccwpck_require__(2785)
+const Agent = __nccwpck_require__(7890)
+const {
+ kAgent,
+ kMockAgentSet,
+ kMockAgentGet,
+ kDispatches,
+ kIsMockActive,
+ kNetConnect,
+ kGetNetConnect,
+ kOptions,
+ kFactory
+} = __nccwpck_require__(4347)
+const MockClient = __nccwpck_require__(8687)
+const MockPool = __nccwpck_require__(6193)
+const { matchValue, buildMockOptions } = __nccwpck_require__(9323)
+const { InvalidArgumentError, UndiciError } = __nccwpck_require__(8045)
+const Dispatcher = __nccwpck_require__(412)
+const Pluralizer = __nccwpck_require__(8891)
+const PendingInterceptorsFormatter = __nccwpck_require__(6823)
+
+class FakeWeakRef {
+ constructor (value) {
+ this.value = value
+ }
+
+ deref () {
+ return this.value
+ }
+}
+
+class MockAgent extends Dispatcher {
+ constructor (opts) {
+ super(opts)
+
+ this[kNetConnect] = true
+ this[kIsMockActive] = true
+
+ // Instantiate Agent and encapsulate
+ if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+ const agent = opts && opts.agent ? opts.agent : new Agent(opts)
+ this[kAgent] = agent
+
+ this[kClients] = agent[kClients]
+ this[kOptions] = buildMockOptions(opts)
+ }
+
+ get (origin) {
+ let dispatcher = this[kMockAgentGet](origin)
+
+ if (!dispatcher) {
+ dispatcher = this[kFactory](origin)
+ this[kMockAgentSet](origin, dispatcher)
+ }
+ return dispatcher
+ }
+
+ dispatch (opts, handler) {
+ // Call MockAgent.get to perform additional setup before dispatching as normal
+ this.get(opts.origin)
+ return this[kAgent].dispatch(opts, handler)
+ }
+
+ async close () {
+ await this[kAgent].close()
+ this[kClients].clear()
+ }
+
+ deactivate () {
+ this[kIsMockActive] = false
+ }
+
+ activate () {
+ this[kIsMockActive] = true
+ }
+
+ enableNetConnect (matcher) {
+ if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
+ if (Array.isArray(this[kNetConnect])) {
+ this[kNetConnect].push(matcher)
+ } else {
+ this[kNetConnect] = [matcher]
+ }
+ } else if (typeof matcher === 'undefined') {
+ this[kNetConnect] = true
+ } else {
+ throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
+ }
+ }
+
+ disableNetConnect () {
+ this[kNetConnect] = false
+ }
+
+ // This is required to bypass issues caused by using global symbols - see:
+ // https://github.com/nodejs/undici/issues/1447
+ get isMockActive () {
+ return this[kIsMockActive]
+ }
+
+ [kMockAgentSet] (origin, dispatcher) {
+ this[kClients].set(origin, new FakeWeakRef(dispatcher))
+ }
+
+ [kFactory] (origin) {
+ const mockOptions = Object.assign({ agent: this }, this[kOptions])
+ return this[kOptions] && this[kOptions].connections === 1
+ ? new MockClient(origin, mockOptions)
+ : new MockPool(origin, mockOptions)
+ }
+
+ [kMockAgentGet] (origin) {
+ // First check if we can immediately find it
+ const ref = this[kClients].get(origin)
+ if (ref) {
+ return ref.deref()
+ }
+
+ // If the origin is not a string create a dummy parent pool and return to user
+ if (typeof origin !== 'string') {
+ const dispatcher = this[kFactory]('http://localhost:9999')
+ this[kMockAgentSet](origin, dispatcher)
+ return dispatcher
+ }
+
+ // If we match, create a pool and assign the same dispatches
+ for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
+ const nonExplicitDispatcher = nonExplicitRef.deref()
+ if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
+ const dispatcher = this[kFactory](origin)
+ this[kMockAgentSet](origin, dispatcher)
+ dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]
+ return dispatcher
+ }
+ }
+ }
+
+ [kGetNetConnect] () {
+ return this[kNetConnect]
+ }
+
+ pendingInterceptors () {
+ const mockAgentClients = this[kClients]
+
+ return Array.from(mockAgentClients.entries())
+ .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
+ .filter(({ pending }) => pending)
+ }
+
+ assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
+ const pending = this.pendingInterceptors()
+
+ if (pending.length === 0) {
+ return
+ }
+
+ const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)
+
+ throw new UndiciError(`
+${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:
+
+${pendingInterceptorsFormatter.format(pending)}
+`.trim())
+ }
+}
+
+module.exports = MockAgent
+
+
+/***/ }),
+
+/***/ 8687:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { promisify } = __nccwpck_require__(3837)
+const Client = __nccwpck_require__(3598)
+const { buildMockDispatch } = __nccwpck_require__(9323)
+const {
+ kDispatches,
+ kMockAgent,
+ kClose,
+ kOriginalClose,
+ kOrigin,
+ kOriginalDispatch,
+ kConnected
+} = __nccwpck_require__(4347)
+const { MockInterceptor } = __nccwpck_require__(410)
+const Symbols = __nccwpck_require__(2785)
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+
+/**
+ * MockClient provides an API that extends the Client to influence the mockDispatches.
+ */
+class MockClient extends Client {
+ constructor (origin, opts) {
+ super(origin, opts)
+
+ if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+
+ this[kMockAgent] = opts.agent
+ this[kOrigin] = origin
+ this[kDispatches] = []
+ this[kConnected] = 1
+ this[kOriginalDispatch] = this.dispatch
+ this[kOriginalClose] = this.close.bind(this)
+
+ this.dispatch = buildMockDispatch.call(this)
+ this.close = this[kClose]
+ }
+
+ get [Symbols.kConnected] () {
+ return this[kConnected]
+ }
+
+ /**
+ * Sets up the base interceptor for mocking replies from undici.
+ */
+ intercept (opts) {
+ return new MockInterceptor(opts, this[kDispatches])
+ }
+
+ async [kClose] () {
+ await promisify(this[kOriginalClose])()
+ this[kConnected] = 0
+ this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+ }
+}
+
+module.exports = MockClient
+
+
+/***/ }),
+
+/***/ 888:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { UndiciError } = __nccwpck_require__(8045)
+
+class MockNotMatchedError extends UndiciError {
+ constructor (message) {
+ super(message)
+ Error.captureStackTrace(this, MockNotMatchedError)
+ this.name = 'MockNotMatchedError'
+ this.message = message || 'The request does not match any registered mock dispatches'
+ this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
+ }
+}
+
+module.exports = {
+ MockNotMatchedError
+}
+
+
+/***/ }),
+
+/***/ 410:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(9323)
+const {
+ kDispatches,
+ kDispatchKey,
+ kDefaultHeaders,
+ kDefaultTrailers,
+ kContentLength,
+ kMockDispatch
+} = __nccwpck_require__(4347)
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+const { buildURL } = __nccwpck_require__(3983)
+
+/**
+ * Defines the scope API for an interceptor reply
+ */
+class MockScope {
+ constructor (mockDispatch) {
+ this[kMockDispatch] = mockDispatch
+ }
+
+ /**
+ * Delay a reply by a set amount in ms.
+ */
+ delay (waitInMs) {
+ if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {
+ throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
+ }
+
+ this[kMockDispatch].delay = waitInMs
+ return this
+ }
+
+ /**
+ * For a defined reply, never mark as consumed.
+ */
+ persist () {
+ this[kMockDispatch].persist = true
+ return this
+ }
+
+ /**
+ * Allow one to define a reply for a set amount of matching requests.
+ */
+ times (repeatTimes) {
+ if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {
+ throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
+ }
+
+ this[kMockDispatch].times = repeatTimes
+ return this
+ }
+}
+
+/**
+ * Defines an interceptor for a Mock
+ */
+class MockInterceptor {
+ constructor (opts, mockDispatches) {
+ if (typeof opts !== 'object') {
+ throw new InvalidArgumentError('opts must be an object')
+ }
+ if (typeof opts.path === 'undefined') {
+ throw new InvalidArgumentError('opts.path must be defined')
+ }
+ if (typeof opts.method === 'undefined') {
+ opts.method = 'GET'
+ }
+ // See https://github.com/nodejs/undici/issues/1245
+ // As per RFC 3986, clients are not supposed to send URI
+ // fragments to servers when they retrieve a document,
+ if (typeof opts.path === 'string') {
+ if (opts.query) {
+ opts.path = buildURL(opts.path, opts.query)
+ } else {
+ // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
+ const parsedURL = new URL(opts.path, 'data://')
+ opts.path = parsedURL.pathname + parsedURL.search
+ }
+ }
+ if (typeof opts.method === 'string') {
+ opts.method = opts.method.toUpperCase()
+ }
+
+ this[kDispatchKey] = buildKey(opts)
+ this[kDispatches] = mockDispatches
+ this[kDefaultHeaders] = {}
+ this[kDefaultTrailers] = {}
+ this[kContentLength] = false
+ }
+
+ createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
+ const responseData = getResponseData(data)
+ const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
+ const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
+ const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
+
+ return { statusCode, data, headers, trailers }
+ }
+
+ validateReplyParameters (statusCode, data, responseOptions) {
+ if (typeof statusCode === 'undefined') {
+ throw new InvalidArgumentError('statusCode must be defined')
+ }
+ if (typeof data === 'undefined') {
+ throw new InvalidArgumentError('data must be defined')
+ }
+ if (typeof responseOptions !== 'object') {
+ throw new InvalidArgumentError('responseOptions must be an object')
+ }
+ }
+
+ /**
+ * Mock an undici request with a defined reply.
+ */
+ reply (replyData) {
+ // Values of reply aren't available right now as they
+ // can only be available when the reply callback is invoked.
+ if (typeof replyData === 'function') {
+ // We'll first wrap the provided callback in another function,
+ // this function will properly resolve the data from the callback
+ // when invoked.
+ const wrappedDefaultsCallback = (opts) => {
+ // Our reply options callback contains the parameter for statusCode, data and options.
+ const resolvedData = replyData(opts)
+
+ // Check if it is in the right format
+ if (typeof resolvedData !== 'object') {
+ throw new InvalidArgumentError('reply options callback must return an object')
+ }
+
+ const { statusCode, data = '', responseOptions = {} } = resolvedData
+ this.validateReplyParameters(statusCode, data, responseOptions)
+ // Since the values can be obtained immediately we return them
+ // from this higher order function that will be resolved later.
+ return {
+ ...this.createMockScopeDispatchData(statusCode, data, responseOptions)
+ }
+ }
+
+ // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data.
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
+ return new MockScope(newMockDispatch)
+ }
+
+ // We can have either one or three parameters, if we get here,
+ // we should have 1-3 parameters. So we spread the arguments of
+ // this function to obtain the parameters, since replyData will always
+ // just be the statusCode.
+ const [statusCode, data = '', responseOptions = {}] = [...arguments]
+ this.validateReplyParameters(statusCode, data, responseOptions)
+
+ // Send in-already provided data like usual
+ const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
+ return new MockScope(newMockDispatch)
+ }
+
+ /**
+ * Mock an undici request with a defined error.
+ */
+ replyWithError (error) {
+ if (typeof error === 'undefined') {
+ throw new InvalidArgumentError('error must be defined')
+ }
+
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
+ return new MockScope(newMockDispatch)
+ }
+
+ /**
+ * Set default reply headers on the interceptor for subsequent replies
+ */
+ defaultReplyHeaders (headers) {
+ if (typeof headers === 'undefined') {
+ throw new InvalidArgumentError('headers must be defined')
+ }
+
+ this[kDefaultHeaders] = headers
+ return this
+ }
+
+ /**
+ * Set default reply trailers on the interceptor for subsequent replies
+ */
+ defaultReplyTrailers (trailers) {
+ if (typeof trailers === 'undefined') {
+ throw new InvalidArgumentError('trailers must be defined')
+ }
+
+ this[kDefaultTrailers] = trailers
+ return this
+ }
+
+ /**
+ * Set reply content length header for replies on the interceptor
+ */
+ replyContentLength () {
+ this[kContentLength] = true
+ return this
+ }
+}
+
+module.exports.MockInterceptor = MockInterceptor
+module.exports.MockScope = MockScope
+
+
+/***/ }),
+
+/***/ 6193:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { promisify } = __nccwpck_require__(3837)
+const Pool = __nccwpck_require__(4634)
+const { buildMockDispatch } = __nccwpck_require__(9323)
+const {
+ kDispatches,
+ kMockAgent,
+ kClose,
+ kOriginalClose,
+ kOrigin,
+ kOriginalDispatch,
+ kConnected
+} = __nccwpck_require__(4347)
+const { MockInterceptor } = __nccwpck_require__(410)
+const Symbols = __nccwpck_require__(2785)
+const { InvalidArgumentError } = __nccwpck_require__(8045)
+
+/**
+ * MockPool provides an API that extends the Pool to influence the mockDispatches.
+ */
+class MockPool extends Pool {
+ constructor (origin, opts) {
+ super(origin, opts)
+
+ if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+ throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+ }
+
+ this[kMockAgent] = opts.agent
+ this[kOrigin] = origin
+ this[kDispatches] = []
+ this[kConnected] = 1
+ this[kOriginalDispatch] = this.dispatch
+ this[kOriginalClose] = this.close.bind(this)
+
+ this.dispatch = buildMockDispatch.call(this)
+ this.close = this[kClose]
+ }
+
+ get [Symbols.kConnected] () {
+ return this[kConnected]
+ }
+
+ /**
+ * Sets up the base interceptor for mocking replies from undici.
+ */
+ intercept (opts) {
+ return new MockInterceptor(opts, this[kDispatches])
+ }
+
+ async [kClose] () {
+ await promisify(this[kOriginalClose])()
+ this[kConnected] = 0
+ this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+ }
+}
+
+module.exports = MockPool
+
+
+/***/ }),
+
+/***/ 4347:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ kAgent: Symbol('agent'),
+ kOptions: Symbol('options'),
+ kFactory: Symbol('factory'),
+ kDispatches: Symbol('dispatches'),
+ kDispatchKey: Symbol('dispatch key'),
+ kDefaultHeaders: Symbol('default headers'),
+ kDefaultTrailers: Symbol('default trailers'),
+ kContentLength: Symbol('content length'),
+ kMockAgent: Symbol('mock agent'),
+ kMockAgentSet: Symbol('mock agent set'),
+ kMockAgentGet: Symbol('mock agent get'),
+ kMockDispatch: Symbol('mock dispatch'),
+ kClose: Symbol('close'),
+ kOriginalClose: Symbol('original agent close'),
+ kOrigin: Symbol('origin'),
+ kIsMockActive: Symbol('is mock active'),
+ kNetConnect: Symbol('net connect'),
+ kGetNetConnect: Symbol('get net connect'),
+ kConnected: Symbol('connected')
+}
+
+
+/***/ }),
+
+/***/ 9323:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { MockNotMatchedError } = __nccwpck_require__(888)
+const {
+ kDispatches,
+ kMockAgent,
+ kOriginalDispatch,
+ kOrigin,
+ kGetNetConnect
+} = __nccwpck_require__(4347)
+const { buildURL, nop } = __nccwpck_require__(3983)
+const { STATUS_CODES } = __nccwpck_require__(3685)
+const {
+ types: {
+ isPromise
+ }
+} = __nccwpck_require__(3837)
+
+function matchValue (match, value) {
+ if (typeof match === 'string') {
+ return match === value
+ }
+ if (match instanceof RegExp) {
+ return match.test(value)
+ }
+ if (typeof match === 'function') {
+ return match(value) === true
+ }
+ return false
+}
+
+function lowerCaseEntries (headers) {
+ return Object.fromEntries(
+ Object.entries(headers).map(([headerName, headerValue]) => {
+ return [headerName.toLocaleLowerCase(), headerValue]
+ })
+ )
+}
+
+/**
+ * @param {import('../../index').Headers|string[]|Record} headers
+ * @param {string} key
+ */
+function getHeaderByName (headers, key) {
+ if (Array.isArray(headers)) {
+ for (let i = 0; i < headers.length; i += 2) {
+ if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {
+ return headers[i + 1]
+ }
+ }
+
+ return undefined
+ } else if (typeof headers.get === 'function') {
+ return headers.get(key)
+ } else {
+ return lowerCaseEntries(headers)[key.toLocaleLowerCase()]
+ }
+}
+
+/** @param {string[]} headers */
+function buildHeadersFromArray (headers) { // fetch HeadersList
+ const clone = headers.slice()
+ const entries = []
+ for (let index = 0; index < clone.length; index += 2) {
+ entries.push([clone[index], clone[index + 1]])
+ }
+ return Object.fromEntries(entries)
+}
+
+function matchHeaders (mockDispatch, headers) {
+ if (typeof mockDispatch.headers === 'function') {
+ if (Array.isArray(headers)) { // fetch HeadersList
+ headers = buildHeadersFromArray(headers)
+ }
+ return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
+ }
+ if (typeof mockDispatch.headers === 'undefined') {
+ return true
+ }
+ if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
+ return false
+ }
+
+ for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {
+ const headerValue = getHeaderByName(headers, matchHeaderName)
+
+ if (!matchValue(matchHeaderValue, headerValue)) {
+ return false
+ }
+ }
+ return true
+}
+
+function safeUrl (path) {
+ if (typeof path !== 'string') {
+ return path
+ }
+
+ const pathSegments = path.split('?')
+
+ if (pathSegments.length !== 2) {
+ return path
+ }
+
+ const qp = new URLSearchParams(pathSegments.pop())
+ qp.sort()
+ return [...pathSegments, qp.toString()].join('?')
+}
+
+function matchKey (mockDispatch, { path, method, body, headers }) {
+ const pathMatch = matchValue(mockDispatch.path, path)
+ const methodMatch = matchValue(mockDispatch.method, method)
+ const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true
+ const headersMatch = matchHeaders(mockDispatch, headers)
+ return pathMatch && methodMatch && bodyMatch && headersMatch
+}
+
+function getResponseData (data) {
+ if (Buffer.isBuffer(data)) {
+ return data
+ } else if (typeof data === 'object') {
+ return JSON.stringify(data)
+ } else {
+ return data.toString()
+ }
+}
+
+function getMockDispatch (mockDispatches, key) {
+ const basePath = key.query ? buildURL(key.path, key.query) : key.path
+ const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
+
+ // Match path
+ let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
+ }
+
+ // Match method
+ matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
+ }
+
+ // Match body
+ matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
+ }
+
+ // Match headers
+ matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
+ if (matchedMockDispatches.length === 0) {
+ throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
+ }
+
+ return matchedMockDispatches[0]
+}
+
+function addMockDispatch (mockDispatches, key, data) {
+ const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
+ const replyData = typeof data === 'function' ? { callback: data } : { ...data }
+ const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
+ mockDispatches.push(newMockDispatch)
+ return newMockDispatch
+}
+
+function deleteMockDispatch (mockDispatches, key) {
+ const index = mockDispatches.findIndex(dispatch => {
+ if (!dispatch.consumed) {
+ return false
+ }
+ return matchKey(dispatch, key)
+ })
+ if (index !== -1) {
+ mockDispatches.splice(index, 1)
+ }
+}
+
+function buildKey (opts) {
+ const { path, method, body, headers, query } = opts
+ return {
+ path,
+ method,
+ body,
+ headers,
+ query
+ }
+}
+
+function generateKeyValues (data) {
+ return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
+ ...keyValuePairs,
+ Buffer.from(`${key}`),
+ Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
+ ], [])
+}
+
+/**
+ * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
+ * @param {number} statusCode
+ */
+function getStatusText (statusCode) {
+ return STATUS_CODES[statusCode] || 'unknown'
+}
+
+async function getResponse (body) {
+ const buffers = []
+ for await (const data of body) {
+ buffers.push(data)
+ }
+ return Buffer.concat(buffers).toString('utf8')
+}
+
+/**
+ * Mock dispatch function used to simulate undici dispatches
+ */
+function mockDispatch (opts, handler) {
+ // Get mock dispatch from built key
+ const key = buildKey(opts)
+ const mockDispatch = getMockDispatch(this[kDispatches], key)
+
+ mockDispatch.timesInvoked++
+
+ // Here's where we resolve a callback if a callback is present for the dispatch data.
+ if (mockDispatch.data.callback) {
+ mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
+ }
+
+ // Parse mockDispatch data
+ const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
+ const { timesInvoked, times } = mockDispatch
+
+ // If it's used up and not persistent, mark as consumed
+ mockDispatch.consumed = !persist && timesInvoked >= times
+ mockDispatch.pending = timesInvoked < times
+
+ // If specified, trigger dispatch error
+ if (error !== null) {
+ deleteMockDispatch(this[kDispatches], key)
+ handler.onError(error)
+ return true
+ }
+
+ // Handle the request with a delay if necessary
+ if (typeof delay === 'number' && delay > 0) {
+ setTimeout(() => {
+ handleReply(this[kDispatches])
+ }, delay)
+ } else {
+ handleReply(this[kDispatches])
+ }
+
+ function handleReply (mockDispatches, _data = data) {
+ // fetch's HeadersList is a 1D string array
+ const optsHeaders = Array.isArray(opts.headers)
+ ? buildHeadersFromArray(opts.headers)
+ : opts.headers
+ const body = typeof _data === 'function'
+ ? _data({ ...opts, headers: optsHeaders })
+ : _data
+
+ // util.types.isPromise is likely needed for jest.
+ if (isPromise(body)) {
+ // If handleReply is asynchronous, throwing an error
+ // in the callback will reject the promise, rather than
+ // synchronously throw the error, which breaks some tests.
+ // Rather, we wait for the callback to resolve if it is a
+ // promise, and then re-run handleReply with the new body.
+ body.then((newData) => handleReply(mockDispatches, newData))
+ return
+ }
+
+ const responseData = getResponseData(body)
+ const responseHeaders = generateKeyValues(headers)
+ const responseTrailers = generateKeyValues(trailers)
+
+ handler.abort = nop
+ handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
+ handler.onData(Buffer.from(responseData))
+ handler.onComplete(responseTrailers)
+ deleteMockDispatch(mockDispatches, key)
+ }
+
+ function resume () {}
+
+ return true
+}
+
+function buildMockDispatch () {
+ const agent = this[kMockAgent]
+ const origin = this[kOrigin]
+ const originalDispatch = this[kOriginalDispatch]
+
+ return function dispatch (opts, handler) {
+ if (agent.isMockActive) {
+ try {
+ mockDispatch.call(this, opts, handler)
+ } catch (error) {
+ if (error instanceof MockNotMatchedError) {
+ const netConnect = agent[kGetNetConnect]()
+ if (netConnect === false) {
+ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
+ }
+ if (checkNetConnect(netConnect, origin)) {
+ originalDispatch.call(this, opts, handler)
+ } else {
+ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
+ }
+ } else {
+ throw error
+ }
+ }
+ } else {
+ originalDispatch.call(this, opts, handler)
+ }
+ }
+}
+
+function checkNetConnect (netConnect, origin) {
+ const url = new URL(origin)
+ if (netConnect === true) {
+ return true
+ } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {
+ return true
+ }
+ return false
+}
+
+function buildMockOptions (opts) {
+ if (opts) {
+ const { agent, ...mockOptions } = opts
+ return mockOptions
+ }
+}
+
+module.exports = {
+ getResponseData,
+ getMockDispatch,
+ addMockDispatch,
+ deleteMockDispatch,
+ buildKey,
+ generateKeyValues,
+ matchValue,
+ getResponse,
+ getStatusText,
+ mockDispatch,
+ buildMockDispatch,
+ checkNetConnect,
+ buildMockOptions,
+ getHeaderByName
+}
+
+
+/***/ }),
+
+/***/ 6823:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { Transform } = __nccwpck_require__(2781)
+const { Console } = __nccwpck_require__(6206)
+
+/**
+ * Gets the output of `console.table(…)` as a string.
+ */
+module.exports = class PendingInterceptorsFormatter {
+ constructor ({ disableColors } = {}) {
+ this.transform = new Transform({
+ transform (chunk, _enc, cb) {
+ cb(null, chunk)
+ }
+ })
+
+ this.logger = new Console({
+ stdout: this.transform,
+ inspectOptions: {
+ colors: !disableColors && !process.env.CI
+ }
+ })
+ }
+
+ format (pendingInterceptors) {
+ const withPrettyHeaders = pendingInterceptors.map(
+ ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
+ Method: method,
+ Origin: origin,
+ Path: path,
+ 'Status code': statusCode,
+ Persistent: persist ? '✅' : '❌',
+ Invocations: timesInvoked,
+ Remaining: persist ? Infinity : times - timesInvoked
+ }))
+
+ this.logger.table(withPrettyHeaders)
+ return this.transform.read().toString()
+ }
+}
+
+
+/***/ }),
+
+/***/ 8891:
+/***/ ((module) => {
+
+"use strict";
+
+
+const singulars = {
+ pronoun: 'it',
+ is: 'is',
+ was: 'was',
+ this: 'this'
+}
+
+const plurals = {
+ pronoun: 'they',
+ is: 'are',
+ was: 'were',
+ this: 'these'
+}
+
+module.exports = class Pluralizer {
+ constructor (singular, plural) {
+ this.singular = singular
+ this.plural = plural
+ }
+
+ pluralize (count) {
+ const one = count === 1
+ const keys = one ? singulars : plurals
+ const noun = one ? this.singular : this.plural
+ return { ...keys, count, noun }
+ }
+}
+
+
+/***/ }),
+
+/***/ 8266:
+/***/ ((module) => {
+
+"use strict";
+/* eslint-disable */
+
+
+
+// Extracted from node/lib/internal/fixed_queue.js
+
+// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
+const kSize = 2048;
+const kMask = kSize - 1;
+
+// The FixedQueue is implemented as a singly-linked list of fixed-size
+// circular buffers. It looks something like this:
+//
+// head tail
+// | |
+// v v
+// +-----------+ <-----\ +-----------+ <------\ +-----------+
+// | [null] | \----- | next | \------- | next |
+// +-----------+ +-----------+ +-----------+
+// | item | <-- bottom | item | <-- bottom | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | | [empty] |
+// | item | | item | bottom --> | item |
+// | item | | item | | item |
+// | ... | | ... | | ... |
+// | item | | item | | item |
+// | item | | item | | item |
+// | [empty] | <-- top | item | | item |
+// | [empty] | | item | | item |
+// | [empty] | | [empty] | <-- top top --> | [empty] |
+// +-----------+ +-----------+ +-----------+
+//
+// Or, if there is only one circular buffer, it looks something
+// like either of these:
+//
+// head tail head tail
+// | | | |
+// v v v v
+// +-----------+ +-----------+
+// | [null] | | [null] |
+// +-----------+ +-----------+
+// | [empty] | | item |
+// | [empty] | | item |
+// | item | <-- bottom top --> | [empty] |
+// | item | | [empty] |
+// | [empty] | <-- top bottom --> | item |
+// | [empty] | | item |
+// +-----------+ +-----------+
+//
+// Adding a value means moving `top` forward by one, removing means
+// moving `bottom` forward by one. After reaching the end, the queue
+// wraps around.
+//
+// When `top === bottom` the current queue is empty and when
+// `top + 1 === bottom` it's full. This wastes a single space of storage
+// but allows much quicker checks.
+
+class FixedCircularBuffer {
+ constructor() {
+ this.bottom = 0;
+ this.top = 0;
+ this.list = new Array(kSize);
+ this.next = null;
+ }
+
+ isEmpty() {
+ return this.top === this.bottom;
+ }
+
+ isFull() {
+ return ((this.top + 1) & kMask) === this.bottom;
+ }
+
+ push(data) {
+ this.list[this.top] = data;
+ this.top = (this.top + 1) & kMask;
+ }
+
+ shift() {
+ const nextItem = this.list[this.bottom];
+ if (nextItem === undefined)
+ return null;
+ this.list[this.bottom] = undefined;
+ this.bottom = (this.bottom + 1) & kMask;
+ return nextItem;
+ }
+}
+
+module.exports = class FixedQueue {
+ constructor() {
+ this.head = this.tail = new FixedCircularBuffer();
+ }
+
+ isEmpty() {
+ return this.head.isEmpty();
+ }
+
+ push(data) {
+ if (this.head.isFull()) {
+ // Head is full: Creates a new queue, sets the old queue's `.next` to it,
+ // and sets it as the new main queue.
+ this.head = this.head.next = new FixedCircularBuffer();
+ }
+ this.head.push(data);
+ }
+
+ shift() {
+ const tail = this.tail;
+ const next = tail.shift();
+ if (tail.isEmpty() && tail.next !== null) {
+ // If there is another queue, it forms the new tail.
+ this.tail = tail.next;
+ }
+ return next;
+ }
+};
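+
+// Usage sketch (illustrative only): the queue grows by chaining extra 2048-slot ring
+// buffers when the head buffer fills, so push/shift stay O(1) and no elements are
+// ever copied or re-indexed.
+//
+//   const FixedQueue = module.exports
+//   const queue = new FixedQueue()
+//   for (let i = 0; i < 5000; i++) queue.push(i) // spans three FixedCircularBuffers
+//   queue.shift()   // => 0, FIFO order is preserved across buffer boundaries
+//   queue.isEmpty() // => false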
+
+
+/***/ }),
+
+/***/ 3198:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const DispatcherBase = __nccwpck_require__(4839)
+const FixedQueue = __nccwpck_require__(8266)
+const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(2785)
+const PoolStats = __nccwpck_require__(9689)
+
+const kClients = Symbol('clients')
+const kNeedDrain = Symbol('needDrain')
+const kQueue = Symbol('queue')
+const kClosedResolve = Symbol('closed resolve')
+const kOnDrain = Symbol('onDrain')
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kGetDispatcher = Symbol('get dispatcher')
+const kAddClient = Symbol('add client')
+const kRemoveClient = Symbol('remove client')
+const kStats = Symbol('stats')
+
+class PoolBase extends DispatcherBase {
+ constructor () {
+ super()
+
+ this[kQueue] = new FixedQueue()
+ this[kClients] = []
+ this[kQueued] = 0
+
+ const pool = this
+
+ this[kOnDrain] = function onDrain (origin, targets) {
+ const queue = pool[kQueue]
+
+ let needDrain = false
+
+ while (!needDrain) {
+ const item = queue.shift()
+ if (!item) {
+ break
+ }
+ pool[kQueued]--
+ needDrain = !this.dispatch(item.opts, item.handler)
+ }
+
+ this[kNeedDrain] = needDrain
+
+ if (!this[kNeedDrain] && pool[kNeedDrain]) {
+ pool[kNeedDrain] = false
+ pool.emit('drain', origin, [pool, ...targets])
+ }
+
+ if (pool[kClosedResolve] && queue.isEmpty()) {
+ Promise
+ .all(pool[kClients].map(c => c.close()))
+ .then(pool[kClosedResolve])
+ }
+ }
+
+ this[kOnConnect] = (origin, targets) => {
+ pool.emit('connect', origin, [pool, ...targets])
+ }
+
+ this[kOnDisconnect] = (origin, targets, err) => {
+ pool.emit('disconnect', origin, [pool, ...targets], err)
+ }
+
+ this[kOnConnectionError] = (origin, targets, err) => {
+ pool.emit('connectionError', origin, [pool, ...targets], err)
+ }
+
+ this[kStats] = new PoolStats(this)
+ }
+
+ get [kBusy] () {
+ return this[kNeedDrain]
+ }
+
+ get [kConnected] () {
+ return this[kClients].filter(client => client[kConnected]).length
+ }
+
+ get [kFree] () {
+ return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
+ }
+
+ get [kPending] () {
+ let ret = this[kQueued]
+ for (const { [kPending]: pending } of this[kClients]) {
+ ret += pending
+ }
+ return ret
+ }
+
+ get [kRunning] () {
+ let ret = 0
+ for (const { [kRunning]: running } of this[kClients]) {
+ ret += running
+ }
+ return ret
+ }
+
+ get [kSize] () {
+ let ret = this[kQueued]
+ for (const { [kSize]: size } of this[kClients]) {
+ ret += size
+ }
+ return ret
+ }
+
+ get stats () {
+ return this[kStats]
+ }
+
+ async [kClose] () {
+ if (this[kQueue].isEmpty()) {
+ return Promise.all(this[kClients].map(c => c.close()))
+ } else {
+ return new Promise((resolve) => {
+ this[kClosedResolve] = resolve
+ })
+ }
+ }
+
+ async [kDestroy] (err) {
+ while (true) {
+ const item = this[kQueue].shift()
+ if (!item) {
+ break
+ }
+ item.handler.onError(err)
+ }
+
+ return Promise.all(this[kClients].map(c => c.destroy(err)))
+ }
+
+ [kDispatch] (opts, handler) {
+ const dispatcher = this[kGetDispatcher]()
+
+ if (!dispatcher) {
+ this[kNeedDrain] = true
+ this[kQueue].push({ opts, handler })
+ this[kQueued]++
+ } else if (!dispatcher.dispatch(opts, handler)) {
+ dispatcher[kNeedDrain] = true
+ this[kNeedDrain] = !this[kGetDispatcher]()
+ }
+
+ return !this[kNeedDrain]
+ }
+
+ [kAddClient] (client) {
+ client
+ .on('drain', this[kOnDrain])
+ .on('connect', this[kOnConnect])
+ .on('disconnect', this[kOnDisconnect])
+ .on('connectionError', this[kOnConnectionError])
+
+ this[kClients].push(client)
+
+ if (this[kNeedDrain]) {
+ process.nextTick(() => {
+ if (this[kNeedDrain]) {
+ this[kOnDrain](client[kUrl], [this, client])
+ }
+ })
+ }
+
+ return this
+ }
+
+ [kRemoveClient] (client) {
+ client.close(() => {
+ const idx = this[kClients].indexOf(client)
+ if (idx !== -1) {
+ this[kClients].splice(idx, 1)
+ }
+ })
+
+ this[kNeedDrain] = this[kClients].some(dispatcher => (
+ !dispatcher[kNeedDrain] &&
+ dispatcher.closed !== true &&
+ dispatcher.destroyed !== true
+ ))
+ }
+}
+
+module.exports = {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kRemoveClient,
+ kGetDispatcher
+}
+
+
+/***/ }),
+
+/***/ 9689:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(2785)
+const kPool = Symbol('pool')
+
+class PoolStats {
+ constructor (pool) {
+ this[kPool] = pool
+ }
+
+ get connected () {
+ return this[kPool][kConnected]
+ }
+
+ get free () {
+ return this[kPool][kFree]
+ }
+
+ get pending () {
+ return this[kPool][kPending]
+ }
+
+ get queued () {
+ return this[kPool][kQueued]
+ }
+
+ get running () {
+ return this[kPool][kRunning]
+ }
+
+ get size () {
+ return this[kPool][kSize]
+ }
+}
+
+module.exports = PoolStats
+
+
+/***/ }),
+
+/***/ 4634:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const {
+ PoolBase,
+ kClients,
+ kNeedDrain,
+ kAddClient,
+ kGetDispatcher
+} = __nccwpck_require__(3198)
+const Client = __nccwpck_require__(3598)
+const {
+ InvalidArgumentError
+} = __nccwpck_require__(8045)
+const util = __nccwpck_require__(3983)
+const { kUrl, kInterceptors } = __nccwpck_require__(2785)
+const buildConnector = __nccwpck_require__(2067)
+
+const kOptions = Symbol('options')
+const kConnections = Symbol('connections')
+const kFactory = Symbol('factory')
+
+function defaultFactory (origin, opts) {
+ return new Client(origin, opts)
+}
+
+class Pool extends PoolBase {
+ constructor (origin, {
+ connections,
+ factory = defaultFactory,
+ connect,
+ connectTimeout,
+ tls,
+ maxCachedSessions,
+ socketPath,
+ autoSelectFamily,
+ autoSelectFamilyAttemptTimeout,
+ allowH2,
+ ...options
+ } = {}) {
+ super()
+
+ if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
+ throw new InvalidArgumentError('invalid connections')
+ }
+
+ if (typeof factory !== 'function') {
+ throw new InvalidArgumentError('factory must be a function.')
+ }
+
+ if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+ throw new InvalidArgumentError('connect must be a function or an object')
+ }
+
+ if (typeof connect !== 'function') {
+ connect = buildConnector({
+ ...tls,
+ maxCachedSessions,
+ allowH2,
+ socketPath,
+ timeout: connectTimeout,
+ ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+ ...connect
+ })
+ }
+
+ this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
+ ? options.interceptors.Pool
+ : []
+ this[kConnections] = connections || null
+ this[kUrl] = util.parseOrigin(origin)
+ this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
+ this[kOptions].interceptors = options.interceptors
+ ? { ...options.interceptors }
+ : undefined
+ this[kFactory] = factory
+ }
+
+ [kGetDispatcher] () {
+ let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])
+
+ if (dispatcher) {
+ return dispatcher
+ }
+
+ if (!this[kConnections] || this[kClients].length < this[kConnections]) {
+ dispatcher = this[kFactory](this[kUrl], this[kOptions])
+ this[kAddClient](dispatcher)
+ }
+
+ return dispatcher
+ }
+}
+
+module.exports = Pool
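+
+// Usage sketch against undici's public API (illustrative only; the origin and the
+// `connections` limit are placeholders, and the calls assume an async context).
+// Each request is dispatched to a free Client, or queued by PoolBase when every
+// connection is busy:
+//
+//   const { Pool } = require('undici')
+//   const pool = new Pool('http://localhost:3000', { connections: 10 })
+//   const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
+//   console.log(statusCode, await body.text())
+//   await pool.close()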
+
+
+/***/ }),
+
+/***/ 7858:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(2785)
+const { URL } = __nccwpck_require__(7310)
+const Agent = __nccwpck_require__(7890)
+const Pool = __nccwpck_require__(4634)
+const DispatcherBase = __nccwpck_require__(4839)
+const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(8045)
+const buildConnector = __nccwpck_require__(2067)
+
+const kAgent = Symbol('proxy agent')
+const kClient = Symbol('proxy client')
+const kProxyHeaders = Symbol('proxy headers')
+const kRequestTls = Symbol('request tls settings')
+const kProxyTls = Symbol('proxy tls settings')
+const kConnectEndpoint = Symbol('connect endpoint function')
+
+function defaultProtocolPort (protocol) {
+ return protocol === 'https:' ? 443 : 80
+}
+
+function buildProxyOptions (opts) {
+ if (typeof opts === 'string') {
+ opts = { uri: opts }
+ }
+
+ if (!opts || !opts.uri) {
+ throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+ }
+
+ return {
+ uri: opts.uri,
+ protocol: opts.protocol || 'https'
+ }
+}
+
+function defaultFactory (origin, opts) {
+ return new Pool(origin, opts)
+}
+
+class ProxyAgent extends DispatcherBase {
+ constructor (opts) {
+ super(opts)
+ this[kProxy] = buildProxyOptions(opts)
+ this[kAgent] = new Agent(opts)
+ this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
+ ? opts.interceptors.ProxyAgent
+ : []
+
+ if (typeof opts === 'string') {
+ opts = { uri: opts }
+ }
+
+ if (!opts || !opts.uri) {
+ throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+ }
+
+ const { clientFactory = defaultFactory } = opts
+
+ if (typeof clientFactory !== 'function') {
+ throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
+ }
+
+ this[kRequestTls] = opts.requestTls
+ this[kProxyTls] = opts.proxyTls
+ this[kProxyHeaders] = opts.headers || {}
+
+ const resolvedUrl = new URL(opts.uri)
+ const { origin, port, host, username, password } = resolvedUrl
+
+ if (opts.auth && opts.token) {
+ throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
+ } else if (opts.auth) {
+ /* @deprecated in favour of opts.token */
+ this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
+ } else if (opts.token) {
+ this[kProxyHeaders]['proxy-authorization'] = opts.token
+ } else if (username && password) {
+ this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
+ }
+
+ const connect = buildConnector({ ...opts.proxyTls })
+ this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
+ this[kClient] = clientFactory(resolvedUrl, { connect })
+ this[kAgent] = new Agent({
+ ...opts,
+ connect: async (opts, callback) => {
+ let requestedHost = opts.host
+ if (!opts.port) {
+ requestedHost += `:${defaultProtocolPort(opts.protocol)}`
+ }
+ try {
+ const { socket, statusCode } = await this[kClient].connect({
+ origin,
+ port,
+ path: requestedHost,
+ signal: opts.signal,
+ headers: {
+ ...this[kProxyHeaders],
+ host
+ }
+ })
+ if (statusCode !== 200) {
+ socket.on('error', () => {}).destroy()
+ callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
+ }
+ if (opts.protocol !== 'https:') {
+ callback(null, socket)
+ return
+ }
+ let servername
+ if (this[kRequestTls]) {
+ servername = this[kRequestTls].servername
+ } else {
+ servername = opts.servername
+ }
+ this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
+ } catch (err) {
+ callback(err)
+ }
+ }
+ })
+ }
+
+ dispatch (opts, handler) {
+ const { host } = new URL(opts.origin)
+ const headers = buildHeaders(opts.headers)
+ throwIfProxyAuthIsSent(headers)
+ return this[kAgent].dispatch(
+ {
+ ...opts,
+ headers: {
+ ...headers,
+ host
+ }
+ },
+ handler
+ )
+ }
+
+ async [kClose] () {
+ await this[kAgent].close()
+ await this[kClient].close()
+ }
+
+ async [kDestroy] () {
+ await this[kAgent].destroy()
+ await this[kClient].destroy()
+ }
+}
+
+/**
+ * @param {string[] | Record<string, string>} headers
+ * @returns {Record<string, string>}
+ */
+function buildHeaders (headers) {
+ // When using undici.fetch, the headers list is stored
+ // as an array.
+ if (Array.isArray(headers)) {
+    /** @type {Record<string, string>} */
+ const headersPair = {}
+
+ for (let i = 0; i < headers.length; i += 2) {
+ headersPair[headers[i]] = headers[i + 1]
+ }
+
+ return headersPair
+ }
+
+ return headers
+}
+
+/**
+ * @param {Record<string, string>} headers
+ *
+ * Previous versions of ProxyAgent allowed the Proxy-Authorization header to be passed in
+ * the request headers. That behaviour was changed, and this check was added to protect
+ * end users from a security vulnerability.
+ * It should be removed in the next major version for performance reasons.
+ */
+function throwIfProxyAuthIsSent (headers) {
+ const existProxyAuth = headers && Object.keys(headers)
+ .find((key) => key.toLowerCase() === 'proxy-authorization')
+ if (existProxyAuth) {
+ throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
+ }
+}
+
+module.exports = ProxyAgent
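+
+// Usage sketch against undici's public API (illustrative only; the proxy URI and the
+// token value are placeholders). Requests are tunnelled through the proxy via HTTP
+// CONNECT, with `opts.token` forwarded as the proxy-authorization header:
+//
+//   const { ProxyAgent, setGlobalDispatcher, request } = require('undici')
+//   const proxyAgent = new ProxyAgent({ uri: 'http://proxy.example.com:8080', token: 'Bearer xyz' })
+//   setGlobalDispatcher(proxyAgent)                                   // proxy every undici request, or
+//   await request('https://example.com', { dispatcher: proxyAgent })  // pass the dispatcher per request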
+
+
+/***/ }),
+
+/***/ 9459:
+/***/ ((module) => {
+
+"use strict";
+
+
+let fastNow = Date.now()
+let fastNowTimeout
+
+const fastTimers = []
+
+function onTimeout () {
+ fastNow = Date.now()
+
+ let len = fastTimers.length
+ let idx = 0
+ while (idx < len) {
+ const timer = fastTimers[idx]
+
+ if (timer.state === 0) {
+ timer.state = fastNow + timer.delay
+ } else if (timer.state > 0 && fastNow >= timer.state) {
+ timer.state = -1
+ timer.callback(timer.opaque)
+ }
+
+ if (timer.state === -1) {
+ timer.state = -2
+ if (idx !== len - 1) {
+ fastTimers[idx] = fastTimers.pop()
+ } else {
+ fastTimers.pop()
+ }
+ len -= 1
+ } else {
+ idx += 1
+ }
+ }
+
+ if (fastTimers.length > 0) {
+ refreshTimeout()
+ }
+}
+
+function refreshTimeout () {
+ if (fastNowTimeout && fastNowTimeout.refresh) {
+ fastNowTimeout.refresh()
+ } else {
+ clearTimeout(fastNowTimeout)
+ fastNowTimeout = setTimeout(onTimeout, 1e3)
+ if (fastNowTimeout.unref) {
+ fastNowTimeout.unref()
+ }
+ }
+}
+
+class Timeout {
+ constructor (callback, delay, opaque) {
+ this.callback = callback
+ this.delay = delay
+ this.opaque = opaque
+
+ // -2 not in timer list
+ // -1 in timer list but inactive
+ // 0 in timer list waiting for time
+ // > 0 in timer list waiting for time to expire
+ this.state = -2
+
+ this.refresh()
+ }
+
+ refresh () {
+ if (this.state === -2) {
+ fastTimers.push(this)
+ if (!fastNowTimeout || fastTimers.length === 1) {
+ refreshTimeout()
+ }
+ }
+
+ this.state = 0
+ }
+
+ clear () {
+ this.state = -1
+ }
+}
+
+module.exports = {
+ setTimeout (callback, delay, opaque) {
+ return delay < 1e3
+ ? setTimeout(callback, delay, opaque)
+ : new Timeout(callback, delay, opaque)
+ },
+ clearTimeout (timeout) {
+ if (timeout instanceof Timeout) {
+ timeout.clear()
+ } else {
+ clearTimeout(timeout)
+ }
+ }
+}
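+
+// Usage sketch (illustrative only): delays under one second fall through to the native
+// setTimeout, while longer delays share a single coarse 1-second tick, which keeps the
+// number of live timer handles small at the cost of up to ~1s of precision.
+//
+//   const timers = module.exports
+//   const handle = timers.setTimeout((opaque) => console.log('timed out:', opaque), 30e3, 'request-1')
+//   timers.clearTimeout(handle) // cancels either a native timer or a Timeout instance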
+
+
+/***/ }),
+
+/***/ 5354:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const diagnosticsChannel = __nccwpck_require__(7643)
+const { uid, states } = __nccwpck_require__(9188)
+const {
+ kReadyState,
+ kSentClose,
+ kByteParser,
+ kReceivedClose
+} = __nccwpck_require__(7578)
+const { fireEvent, failWebsocketConnection } = __nccwpck_require__(5515)
+const { CloseEvent } = __nccwpck_require__(2611)
+const { makeRequest } = __nccwpck_require__(8359)
+const { fetching } = __nccwpck_require__(4881)
+const { Headers } = __nccwpck_require__(554)
+const { getGlobalDispatcher } = __nccwpck_require__(1892)
+const { kHeadersList } = __nccwpck_require__(2785)
+
+const channels = {}
+channels.open = diagnosticsChannel.channel('undici:websocket:open')
+channels.close = diagnosticsChannel.channel('undici:websocket:close')
+channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')
+
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = __nccwpck_require__(6113)
+} catch {
+
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#concept-websocket-establish
+ * @param {URL} url
+ * @param {string|string[]} protocols
+ * @param {import('./websocket').WebSocket} ws
+ * @param {(response: any) => void} onEstablish
+ * @param {Partial} options
+ */
+function establishWebSocketConnection (url, protocols, ws, onEstablish, options) {
+ // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s
+ // scheme is "ws", and to "https" otherwise.
+ const requestURL = url
+
+ requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'
+
+ // 2. Let request be a new request, whose URL is requestURL, client is client,
+ // service-workers mode is "none", referrer is "no-referrer", mode is
+ // "websocket", credentials mode is "include", cache mode is "no-store" ,
+ // and redirect mode is "error".
+ const request = makeRequest({
+ urlList: [requestURL],
+ serviceWorkers: 'none',
+ referrer: 'no-referrer',
+ mode: 'websocket',
+ credentials: 'include',
+ cache: 'no-store',
+ redirect: 'error'
+ })
+
+ // Note: undici extension, allow setting custom headers.
+ if (options.headers) {
+ const headersList = new Headers(options.headers)[kHeadersList]
+
+ request.headersList = headersList
+ }
+
+ // 3. Append (`Upgrade`, `websocket`) to request’s header list.
+ // 4. Append (`Connection`, `Upgrade`) to request’s header list.
+ // Note: both of these are handled by undici currently.
+ // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397
+
+ // 5. Let keyValue be a nonce consisting of a randomly selected
+ // 16-byte value that has been forgiving-base64-encoded and
+ // isomorphic encoded.
+ const keyValue = crypto.randomBytes(16).toString('base64')
+
+ // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s
+ // header list.
+ request.headersList.append('sec-websocket-key', keyValue)
+
+ // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s
+ // header list.
+ request.headersList.append('sec-websocket-version', '13')
+
+ // 8. For each protocol in protocols, combine
+ // (`Sec-WebSocket-Protocol`, protocol) in request’s header
+ // list.
+ for (const protocol of protocols) {
+ request.headersList.append('sec-websocket-protocol', protocol)
+ }
+
+ // 9. Let permessageDeflate be a user-agent defined
+ // "permessage-deflate" extension header value.
+ // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673
+ // TODO: enable once permessage-deflate is supported
+ const permessageDeflate = '' // 'permessage-deflate; 15'
+
+ // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to
+ // request’s header list.
+ // request.headersList.append('sec-websocket-extensions', permessageDeflate)
+
+ // 11. Fetch request with useParallelQueue set to true, and
+ // processResponse given response being these steps:
+ const controller = fetching({
+ request,
+ useParallelQueue: true,
+ dispatcher: options.dispatcher ?? getGlobalDispatcher(),
+ processResponse (response) {
+ // 1. If response is a network error or its status is not 101,
+ // fail the WebSocket connection.
+ if (response.type === 'error' || response.status !== 101) {
+ failWebsocketConnection(ws, 'Received network error or non-101 status code.')
+ return
+ }
+
+ // 2. If protocols is not the empty list and extracting header
+ // list values given `Sec-WebSocket-Protocol` and response’s
+ // header list results in null, failure, or the empty byte
+ // sequence, then fail the WebSocket connection.
+ if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {
+ failWebsocketConnection(ws, 'Server did not respond with sent protocols.')
+ return
+ }
+
+ // 3. Follow the requirements stated step 2 to step 6, inclusive,
+ // of the last set of steps in section 4.1 of The WebSocket
+ // Protocol to validate response. This either results in fail
+ // the WebSocket connection or the WebSocket connection is
+ // established.
+
+ // 2. If the response lacks an |Upgrade| header field or the |Upgrade|
+ // header field contains a value that is not an ASCII case-
+ // insensitive match for the value "websocket", the client MUST
+ // _Fail the WebSocket Connection_.
+ if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {
+ failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".')
+ return
+ }
+
+ // 3. If the response lacks a |Connection| header field or the
+ // |Connection| header field doesn't contain a token that is an
+ // ASCII case-insensitive match for the value "Upgrade", the client
+ // MUST _Fail the WebSocket Connection_.
+ if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {
+ failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".')
+ return
+ }
+
+ // 4. If the response lacks a |Sec-WebSocket-Accept| header field or
+ // the |Sec-WebSocket-Accept| contains a value other than the
+ // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-
+ // Key| (as a string, not base64-decoded) with the string "258EAFA5-
+ // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and
+ // trailing whitespace, the client MUST _Fail the WebSocket
+ // Connection_.
+ const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')
+ const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')
+ if (secWSAccept !== digest) {
+ failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')
+ return
+ }
+
+ // 5. If the response includes a |Sec-WebSocket-Extensions| header
+ // field and this header field indicates the use of an extension
+ // that was not present in the client's handshake (the server has
+ // indicated an extension not requested by the client), the client
+ // MUST _Fail the WebSocket Connection_. (The parsing of this
+ // header field to determine which extensions are requested is
+ // discussed in Section 9.1.)
+ const secExtension = response.headersList.get('Sec-WebSocket-Extensions')
+
+ if (secExtension !== null && secExtension !== permessageDeflate) {
+ failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')
+ return
+ }
+
+ // 6. If the response includes a |Sec-WebSocket-Protocol| header field
+ // and this header field indicates the use of a subprotocol that was
+ // not present in the client's handshake (the server has indicated a
+ // subprotocol not requested by the client), the client MUST _Fail
+ // the WebSocket Connection_.
+ const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')
+
+ if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {
+ failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')
+ return
+ }
+
+ response.socket.on('data', onSocketData)
+ response.socket.on('close', onSocketClose)
+ response.socket.on('error', onSocketError)
+
+ if (channels.open.hasSubscribers) {
+ channels.open.publish({
+ address: response.socket.address(),
+ protocol: secProtocol,
+ extensions: secExtension
+ })
+ }
+
+ onEstablish(response)
+ }
+ })
+
+ return controller
+}
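+
+// Handshake check sketch (illustrative only): step 4 above recomputes Sec-WebSocket-Accept
+// exactly as RFC 6455 defines it, the base64-encoded SHA-1 of the client nonce
+// concatenated with the fixed GUID. Using the example nonce from RFC 6455 §1.3:
+//
+//   const crypto = require('crypto')
+//   const keyValue = 'dGhlIHNhbXBsZSBub25jZQ=='        // client nonce (Sec-WebSocket-Key)
+//   const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' // fixed GUID
+//   crypto.createHash('sha1').update(keyValue + uid).digest('base64')
+//   // => 's3pPLMBiTxaQ9kYGzzhZRbK+xOo='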
+
+/**
+ * @param {Buffer} chunk
+ */
+function onSocketData (chunk) {
+ if (!this.ws[kByteParser].write(chunk)) {
+ this.pause()
+ }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
+ */
+function onSocketClose () {
+ const { ws } = this
+
+ // If the TCP connection was closed after the
+ // WebSocket closing handshake was completed, the WebSocket connection
+ // is said to have been closed _cleanly_.
+ const wasClean = ws[kSentClose] && ws[kReceivedClose]
+
+ let code = 1005
+ let reason = ''
+
+ const result = ws[kByteParser].closingInfo
+
+ if (result) {
+ code = result.code ?? 1005
+ reason = result.reason
+ } else if (!ws[kSentClose]) {
+ // If _The WebSocket
+ // Connection is Closed_ and no Close control frame was received by the
+ // endpoint (such as could occur if the underlying transport connection
+ // is lost), _The WebSocket Connection Close Code_ is considered to be
+ // 1006.
+ code = 1006
+ }
+
+ // 1. Change the ready state to CLOSED (3).
+ ws[kReadyState] = states.CLOSED
+
+ // 2. If the user agent was required to fail the WebSocket
+ // connection, or if the WebSocket connection was closed
+ // after being flagged as full, fire an event named error
+ // at the WebSocket object.
+ // TODO
+
+ // 3. Fire an event named close at the WebSocket object,
+ // using CloseEvent, with the wasClean attribute
+ // initialized to true if the connection closed cleanly
+ // and false otherwise, the code attribute initialized to
+ // the WebSocket connection close code, and the reason
+ // attribute initialized to the result of applying UTF-8
+ // decode without BOM to the WebSocket connection close
+ // reason.
+ fireEvent('close', ws, CloseEvent, {
+ wasClean, code, reason
+ })
+
+ if (channels.close.hasSubscribers) {
+ channels.close.publish({
+ websocket: ws,
+ code,
+ reason
+ })
+ }
+}
+
+function onSocketError (error) {
+ const { ws } = this
+
+ ws[kReadyState] = states.CLOSING
+
+ if (channels.socketError.hasSubscribers) {
+ channels.socketError.publish(error)
+ }
+
+ this.destroy()
+}
+
+module.exports = {
+ establishWebSocketConnection
+}
+
+
+/***/ }),
+
+/***/ 9188:
+/***/ ((module) => {
+
+"use strict";
+
+
+// This is a Globally Unique Identifier (GUID) used
+// to validate that the endpoint accepts websocket
+// connections.
+// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3
+const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+ enumerable: true,
+ writable: false,
+ configurable: false
+}
+
+const states = {
+ CONNECTING: 0,
+ OPEN: 1,
+ CLOSING: 2,
+ CLOSED: 3
+}
+
+const opcodes = {
+ CONTINUATION: 0x0,
+ TEXT: 0x1,
+ BINARY: 0x2,
+ CLOSE: 0x8,
+ PING: 0x9,
+ PONG: 0xA
+}
+
+const maxUnsigned16Bit = 2 ** 16 - 1 // 65535
+
+const parserStates = {
+ INFO: 0,
+ PAYLOADLENGTH_16: 2,
+ PAYLOADLENGTH_64: 3,
+ READ_DATA: 4
+}
+
+const emptyBuffer = Buffer.allocUnsafe(0)
+
+module.exports = {
+ uid,
+ staticPropertyDescriptors,
+ states,
+ opcodes,
+ maxUnsigned16Bit,
+ parserStates,
+ emptyBuffer
+}
+
+
+/***/ }),
+
+/***/ 2611:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { webidl } = __nccwpck_require__(1744)
+const { kEnumerableProperty } = __nccwpck_require__(3983)
+const { MessagePort } = __nccwpck_require__(1267)
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent
+ */
+class MessageEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' })
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.MessageEventInit(eventInitDict)
+
+ super(type, eventInitDict)
+
+ this.#eventInit = eventInitDict
+ }
+
+ get data () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.data
+ }
+
+ get origin () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.origin
+ }
+
+ get lastEventId () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.lastEventId
+ }
+
+ get source () {
+ webidl.brandCheck(this, MessageEvent)
+
+ return this.#eventInit.source
+ }
+
+ get ports () {
+ webidl.brandCheck(this, MessageEvent)
+
+ if (!Object.isFrozen(this.#eventInit.ports)) {
+ Object.freeze(this.#eventInit.ports)
+ }
+
+ return this.#eventInit.ports
+ }
+
+ initMessageEvent (
+ type,
+ bubbles = false,
+ cancelable = false,
+ data = null,
+ origin = '',
+ lastEventId = '',
+ source = null,
+ ports = []
+ ) {
+ webidl.brandCheck(this, MessageEvent)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' })
+
+ return new MessageEvent(type, {
+ bubbles, cancelable, data, origin, lastEventId, source, ports
+ })
+ }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#the-closeevent-interface
+ */
+class CloseEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' })
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.CloseEventInit(eventInitDict)
+
+ super(type, eventInitDict)
+
+ this.#eventInit = eventInitDict
+ }
+
+ get wasClean () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.wasClean
+ }
+
+ get code () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.code
+ }
+
+ get reason () {
+ webidl.brandCheck(this, CloseEvent)
+
+ return this.#eventInit.reason
+ }
+}
+
+// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface
+class ErrorEvent extends Event {
+ #eventInit
+
+ constructor (type, eventInitDict) {
+ webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })
+
+ super(type, eventInitDict)
+
+ type = webidl.converters.DOMString(type)
+ eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})
+
+ this.#eventInit = eventInitDict
+ }
+
+ get message () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.message
+ }
+
+ get filename () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.filename
+ }
+
+ get lineno () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.lineno
+ }
+
+ get colno () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.colno
+ }
+
+ get error () {
+ webidl.brandCheck(this, ErrorEvent)
+
+ return this.#eventInit.error
+ }
+}
+
+Object.defineProperties(MessageEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'MessageEvent',
+ configurable: true
+ },
+ data: kEnumerableProperty,
+ origin: kEnumerableProperty,
+ lastEventId: kEnumerableProperty,
+ source: kEnumerableProperty,
+ ports: kEnumerableProperty,
+ initMessageEvent: kEnumerableProperty
+})
+
+Object.defineProperties(CloseEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'CloseEvent',
+ configurable: true
+ },
+ reason: kEnumerableProperty,
+ code: kEnumerableProperty,
+ wasClean: kEnumerableProperty
+})
+
+Object.defineProperties(ErrorEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: 'ErrorEvent',
+ configurable: true
+ },
+ message: kEnumerableProperty,
+ filename: kEnumerableProperty,
+ lineno: kEnumerableProperty,
+ colno: kEnumerableProperty,
+ error: kEnumerableProperty
+})
+
+webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)
+
+webidl.converters['sequence<MessagePort>'] = webidl.sequenceConverter(
+ webidl.converters.MessagePort
+)
+
+const eventInit = [
+ {
+ key: 'bubbles',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'cancelable',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'composed',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+]
+
+webidl.converters.MessageEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'data',
+ converter: webidl.converters.any,
+ defaultValue: null
+ },
+ {
+ key: 'origin',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ },
+ {
+ key: 'lastEventId',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'source',
+ // Node doesn't implement WindowProxy or ServiceWorker, so the only
+ // valid value for source is a MessagePort.
+ converter: webidl.nullableConverter(webidl.converters.MessagePort),
+ defaultValue: null
+ },
+ {
+ key: 'ports',
+    converter: webidl.converters['sequence<MessagePort>'],
+ get defaultValue () {
+ return []
+ }
+ }
+])
+
+webidl.converters.CloseEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'wasClean',
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: 'code',
+ converter: webidl.converters['unsigned short'],
+ defaultValue: 0
+ },
+ {
+ key: 'reason',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ }
+])
+
+webidl.converters.ErrorEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: 'message',
+ converter: webidl.converters.DOMString,
+ defaultValue: ''
+ },
+ {
+ key: 'filename',
+ converter: webidl.converters.USVString,
+ defaultValue: ''
+ },
+ {
+ key: 'lineno',
+ converter: webidl.converters['unsigned long'],
+ defaultValue: 0
+ },
+ {
+ key: 'colno',
+ converter: webidl.converters['unsigned long'],
+ defaultValue: 0
+ },
+ {
+ key: 'error',
+ converter: webidl.converters.any
+ }
+])
+
+module.exports = {
+ MessageEvent,
+ CloseEvent,
+ ErrorEvent
+}
+
+
+/***/ }),
+
+/***/ 5444:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { maxUnsigned16Bit } = __nccwpck_require__(9188)
+
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = __nccwpck_require__(6113)
+} catch {
+
+}
+
+class WebsocketFrameSend {
+ /**
+ * @param {Buffer|undefined} data
+ */
+ constructor (data) {
+ this.frameData = data
+ this.maskKey = crypto.randomBytes(4)
+ }
+
+ createFrame (opcode) {
+ const bodyLength = this.frameData?.byteLength ?? 0
+
+ /** @type {number} */
+ let payloadLength = bodyLength // 0-125
+ let offset = 6
+
+ if (bodyLength > maxUnsigned16Bit) {
+ offset += 8 // payload length is next 8 bytes
+ payloadLength = 127
+ } else if (bodyLength > 125) {
+ offset += 2 // payload length is next 2 bytes
+ payloadLength = 126
+ }
+
+ const buffer = Buffer.allocUnsafe(bodyLength + offset)
+
+ // Clear first 2 bytes, everything else is overwritten
+ buffer[0] = buffer[1] = 0
+ buffer[0] |= 0x80 // FIN
+ buffer[0] = (buffer[0] & 0xF0) + opcode // opcode
+
+ /*! ws. MIT License. Einar Otto Stangvik */
+ buffer[offset - 4] = this.maskKey[0]
+ buffer[offset - 3] = this.maskKey[1]
+ buffer[offset - 2] = this.maskKey[2]
+ buffer[offset - 1] = this.maskKey[3]
+
+ buffer[1] = payloadLength
+
+ if (payloadLength === 126) {
+ buffer.writeUInt16BE(bodyLength, 2)
+ } else if (payloadLength === 127) {
+ // Clear extended payload length
+ buffer[2] = buffer[3] = 0
+ buffer.writeUIntBE(bodyLength, 4, 6)
+ }
+
+ buffer[1] |= 0x80 // MASK
+
+ // mask body
+ for (let i = 0; i < bodyLength; i++) {
+ buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]
+ }
+
+ return buffer
+ }
+}
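+
+// Masking sketch (illustrative only): createFrame() XORs every payload byte with the
+// repeating 4-byte mask key, as RFC 6455 §5.3 requires for client-to-server frames.
+// Unmasking is the same XOR, so a receiver recovers the payload with:
+//
+//   // `payload` and `maskKey` are hypothetical Buffers holding the masked bytes and the key
+//   for (let i = 0; i < payload.length; i++) {
+//     payload[i] ^= maskKey[i % 4]
+//   }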
+
+module.exports = {
+ WebsocketFrameSend
+}
+
+
+/***/ }),
+
+/***/ 1688:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { Writable } = __nccwpck_require__(2781)
+const diagnosticsChannel = __nccwpck_require__(7643)
+const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(9188)
+const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(7578)
+const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(5515)
+const { WebsocketFrameSend } = __nccwpck_require__(5444)
+
+// This code was influenced by ws released under the MIT license.
+// Copyright (c) 2011 Einar Otto Stangvik
+// Copyright (c) 2013 Arnout Kazemier and contributors
+// Copyright (c) 2016 Luigi Pinca and contributors
+
+const channels = {}
+channels.ping = diagnosticsChannel.channel('undici:websocket:ping')
+channels.pong = diagnosticsChannel.channel('undici:websocket:pong')
+
+class ByteParser extends Writable {
+ #buffers = []
+ #byteOffset = 0
+
+ #state = parserStates.INFO
+
+ #info = {}
+ #fragments = []
+
+ constructor (ws) {
+ super()
+
+ this.ws = ws
+ }
+
+ /**
+ * @param {Buffer} chunk
+ * @param {() => void} callback
+ */
+ _write (chunk, _, callback) {
+ this.#buffers.push(chunk)
+ this.#byteOffset += chunk.length
+
+ this.run(callback)
+ }
+
+ /**
+ * Runs whenever a new chunk is received.
+ * Callback is called whenever there are no more chunks buffering,
+ * or not enough bytes are buffered to parse.
+ */
+ run (callback) {
+ while (true) {
+ if (this.#state === parserStates.INFO) {
+ // If there aren't enough bytes to parse the payload length, etc.
+ if (this.#byteOffset < 2) {
+ return callback()
+ }
+
+ const buffer = this.consume(2)
+
+ this.#info.fin = (buffer[0] & 0x80) !== 0
+ this.#info.opcode = buffer[0] & 0x0F
+
+ // If we receive a fragmented message, we use the type of the first
+ // frame to parse the full message as binary/text, when it's terminated
+ this.#info.originalOpcode ??= this.#info.opcode
+
+ this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION
+
+ if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {
+ // Only text and binary frames can be fragmented
+ failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')
+ return
+ }
+
+ const payloadLength = buffer[1] & 0x7F
+
+ if (payloadLength <= 125) {
+ this.#info.payloadLength = payloadLength
+ this.#state = parserStates.READ_DATA
+ } else if (payloadLength === 126) {
+ this.#state = parserStates.PAYLOADLENGTH_16
+ } else if (payloadLength === 127) {
+ this.#state = parserStates.PAYLOADLENGTH_64
+ }
+
+ if (this.#info.fragmented && payloadLength > 125) {
+ // A fragmented frame can't be fragmented itself
+ failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')
+ return
+ } else if (
+ (this.#info.opcode === opcodes.PING ||
+ this.#info.opcode === opcodes.PONG ||
+ this.#info.opcode === opcodes.CLOSE) &&
+ payloadLength > 125
+ ) {
+ // Control frames can have a payload length of 125 bytes MAX
+ failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')
+ return
+ } else if (this.#info.opcode === opcodes.CLOSE) {
+ if (payloadLength === 1) {
+ failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')
+ return
+ }
+
+ const body = this.consume(payloadLength)
+
+ this.#info.closeInfo = this.parseCloseBody(false, body)
+
+ if (!this.ws[kSentClose]) {
+ // If an endpoint receives a Close frame and did not previously send a
+ // Close frame, the endpoint MUST send a Close frame in response. (When
+ // sending a Close frame in response, the endpoint typically echos the
+ // status code it received.)
+ const body = Buffer.allocUnsafe(2)
+ body.writeUInt16BE(this.#info.closeInfo.code, 0)
+ const closeFrame = new WebsocketFrameSend(body)
+
+ this.ws[kResponse].socket.write(
+ closeFrame.createFrame(opcodes.CLOSE),
+ (err) => {
+ if (!err) {
+ this.ws[kSentClose] = true
+ }
+ }
+ )
+ }
+
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ this.ws[kReadyState] = states.CLOSING
+ this.ws[kReceivedClose] = true
+
+ this.end()
+
+ return
+ } else if (this.#info.opcode === opcodes.PING) {
+ // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in
+ // response, unless it already received a Close frame.
+ // A Pong frame sent in response to a Ping frame must have identical
+ // "Application data"
+
+ const body = this.consume(payloadLength)
+
+ if (!this.ws[kReceivedClose]) {
+ const frame = new WebsocketFrameSend(body)
+
+ this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))
+
+ if (channels.ping.hasSubscribers) {
+ channels.ping.publish({
+ payload: body
+ })
+ }
+ }
+
+ this.#state = parserStates.INFO
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ return
+ }
+ } else if (this.#info.opcode === opcodes.PONG) {
+ // A Pong frame MAY be sent unsolicited. This serves as a
+ // unidirectional heartbeat. A response to an unsolicited Pong frame is
+ // not expected.
+
+ const body = this.consume(payloadLength)
+
+ if (channels.pong.hasSubscribers) {
+ channels.pong.publish({
+ payload: body
+ })
+ }
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ return
+ }
+ }
+ } else if (this.#state === parserStates.PAYLOADLENGTH_16) {
+ if (this.#byteOffset < 2) {
+ return callback()
+ }
+
+ const buffer = this.consume(2)
+
+ this.#info.payloadLength = buffer.readUInt16BE(0)
+ this.#state = parserStates.READ_DATA
+ } else if (this.#state === parserStates.PAYLOADLENGTH_64) {
+ if (this.#byteOffset < 8) {
+ return callback()
+ }
+
+ const buffer = this.consume(8)
+ const upper = buffer.readUInt32BE(0)
+
+        // 2^31 is the maximum number of bytes an ArrayBuffer can contain
+        // on 32-bit systems; on 64-bit systems the limit is 2^53-1 bytes.
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length
+ // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275
+ // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e
+ if (upper > 2 ** 31 - 1) {
+ failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')
+ return
+ }
+
+ const lower = buffer.readUInt32BE(4)
+
+        // Combine the upper and lower 32-bit halves of the 64-bit payload length.
+        this.#info.payloadLength = upper * 2 ** 32 + lower
+ this.#state = parserStates.READ_DATA
+ } else if (this.#state === parserStates.READ_DATA) {
+ if (this.#byteOffset < this.#info.payloadLength) {
+ // If there is still more data in this chunk that needs to be read
+ return callback()
+ } else if (this.#byteOffset >= this.#info.payloadLength) {
+ // If the server sent multiple frames in a single chunk
+
+ const body = this.consume(this.#info.payloadLength)
+
+ this.#fragments.push(body)
+
+ // If the frame is unfragmented, or a fragmented frame was terminated,
+ // a message was received
+ if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {
+ const fullMessage = Buffer.concat(this.#fragments)
+
+ websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)
+
+ this.#info = {}
+ this.#fragments.length = 0
+ }
+
+ this.#state = parserStates.INFO
+ }
+ }
+
+ if (this.#byteOffset > 0) {
+ continue
+ } else {
+ callback()
+ break
+ }
+ }
+ }
+
+ /**
+ * Take n bytes from the buffered Buffers
+ * @param {number} n
+ * @returns {Buffer|null}
+ */
+ consume (n) {
+ if (n > this.#byteOffset) {
+ return null
+ } else if (n === 0) {
+ return emptyBuffer
+ }
+
+ if (this.#buffers[0].length === n) {
+ this.#byteOffset -= this.#buffers[0].length
+ return this.#buffers.shift()
+ }
+
+ const buffer = Buffer.allocUnsafe(n)
+ let offset = 0
+
+ while (offset !== n) {
+ const next = this.#buffers[0]
+ const { length } = next
+
+ if (length + offset === n) {
+ buffer.set(this.#buffers.shift(), offset)
+ break
+ } else if (length + offset > n) {
+ buffer.set(next.subarray(0, n - offset), offset)
+ this.#buffers[0] = next.subarray(n - offset)
+ break
+ } else {
+ buffer.set(this.#buffers.shift(), offset)
+ offset += next.length
+ }
+ }
+
+ this.#byteOffset -= n
+
+ return buffer
+ }
+
+ parseCloseBody (onlyCode, data) {
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5
+ /** @type {number|undefined} */
+ let code
+
+ if (data.length >= 2) {
+ // _The WebSocket Connection Close Code_ is
+ // defined as the status code (Section 7.4) contained in the first Close
+ // control frame received by the application
+ code = data.readUInt16BE(0)
+ }
+
+ if (onlyCode) {
+ if (!isValidStatusCode(code)) {
+ return null
+ }
+
+ return { code }
+ }
+
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6
+ /** @type {Buffer} */
+ let reason = data.subarray(2)
+
+ // Remove BOM
+ if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {
+ reason = reason.subarray(3)
+ }
+
+ if (code !== undefined && !isValidStatusCode(code)) {
+ return null
+ }
+
+ try {
+ // TODO: optimize this
+ reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)
+ } catch {
+ return null
+ }
+
+ return { code, reason }
+ }
+
+ get closingInfo () {
+ return this.#info.closeInfo
+ }
+}
+
+module.exports = {
+ ByteParser
+}
+
+
+/***/ }),
+
+/***/ 7578:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ kWebSocketURL: Symbol('url'),
+ kReadyState: Symbol('ready state'),
+ kController: Symbol('controller'),
+ kResponse: Symbol('response'),
+ kBinaryType: Symbol('binary type'),
+ kSentClose: Symbol('sent close'),
+ kReceivedClose: Symbol('received close'),
+ kByteParser: Symbol('byte parser')
+}
+
+
+/***/ }),
+
+/***/ 5515:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(7578)
+const { states, opcodes } = __nccwpck_require__(9188)
+const { MessageEvent, ErrorEvent } = __nccwpck_require__(2611)
+
+/* globals Blob */
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isEstablished (ws) {
+ // If the server's response is validated as provided for above, it is
+ // said that _The WebSocket Connection is Established_ and that the
+ // WebSocket Connection is in the OPEN state.
+ return ws[kReadyState] === states.OPEN
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosing (ws) {
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ return ws[kReadyState] === states.CLOSING
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosed (ws) {
+ return ws[kReadyState] === states.CLOSED
+}
+
+/**
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e
+ * @param {EventTarget} target
+ * @param {EventInit | undefined} eventInitDict
+ */
+function fireEvent (e, target, eventConstructor = Event, eventInitDict) {
+ // 1. If eventConstructor is not given, then let eventConstructor be Event.
+
+ // 2. Let event be the result of creating an event given eventConstructor,
+ // in the relevant realm of target.
+ // 3. Initialize event’s type attribute to e.
+ const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap
+
+ // 4. Initialize any other IDL attributes of event as described in the
+ // invocation of this algorithm.
+
+ // 5. Return the result of dispatching event at target, with legacy target
+ // override flag set if set.
+ target.dispatchEvent(event)
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @param {import('./websocket').WebSocket} ws
+ * @param {number} type Opcode
+ * @param {Buffer} data application data
+ */
+function websocketMessageReceived (ws, type, data) {
+ // 1. If ready state is not OPEN (1), then return.
+ if (ws[kReadyState] !== states.OPEN) {
+ return
+ }
+
+ // 2. Let dataForEvent be determined by switching on type and binary type:
+ let dataForEvent
+
+ if (type === opcodes.TEXT) {
+ // -> type indicates that the data is Text
+ // a new DOMString containing data
+ try {
+ dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)
+ } catch {
+ failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')
+ return
+ }
+ } else if (type === opcodes.BINARY) {
+ if (ws[kBinaryType] === 'blob') {
+ // -> type indicates that the data is Binary and binary type is "blob"
+ // a new Blob object, created in the relevant Realm of the WebSocket
+ // object, that represents data as its raw data
+ dataForEvent = new Blob([data])
+ } else {
+ // -> type indicates that the data is Binary and binary type is "arraybuffer"
+ // a new ArrayBuffer object, created in the relevant Realm of the
+ // WebSocket object, whose contents are data
+ dataForEvent = new Uint8Array(data).buffer
+ }
+ }
+
+ // 3. Fire an event named message at the WebSocket object, using MessageEvent,
+ // with the origin attribute initialized to the serialization of the WebSocket
+ // object’s url's origin, and the data attribute initialized to dataForEvent.
+ fireEvent('message', ws, MessageEvent, {
+ origin: ws[kWebSocketURL].origin,
+ data: dataForEvent
+ })
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455
+ * @see https://datatracker.ietf.org/doc/html/rfc2616
+ * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407
+ * @param {string} protocol
+ */
+function isValidSubprotocol (protocol) {
+ // If present, this value indicates one
+ // or more comma-separated subprotocol the client wishes to speak,
+ // ordered by preference. The elements that comprise this value
+ // MUST be non-empty strings with characters in the range U+0021 to
+ // U+007E not including separator characters as defined in
+ // [RFC2616] and MUST all be unique strings.
+ if (protocol.length === 0) {
+ return false
+ }
+
+ for (const char of protocol) {
+ const code = char.charCodeAt(0)
+
+ if (
+ code < 0x21 ||
+ code > 0x7E ||
+ char === '(' ||
+ char === ')' ||
+ char === '<' ||
+ char === '>' ||
+ char === '@' ||
+ char === ',' ||
+ char === ';' ||
+ char === ':' ||
+ char === '\\' ||
+ char === '"' ||
+ char === '/' ||
+ char === '[' ||
+ char === ']' ||
+ char === '?' ||
+ char === '=' ||
+ char === '{' ||
+ char === '}' ||
+ code === 32 || // SP
+ code === 9 // HT
+ ) {
+ return false
+ }
+ }
+
+ return true
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4
+ * @param {number} code
+ */
+function isValidStatusCode (code) {
+ if (code >= 1000 && code < 1015) {
+ return (
+ code !== 1004 && // reserved
+ code !== 1005 && // "MUST NOT be set as a status code"
+ code !== 1006 // "MUST NOT be set as a status code"
+ )
+ }
+
+ return code >= 3000 && code <= 4999
+}
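+
+// Examples (derived directly from the checks above):
+//   isValidStatusCode(1000) // true  - normal closure
+//   isValidStatusCode(1005) // false - reserved, must never appear on the wire
+//   isValidStatusCode(2999) // false - outside both accepted ranges
+//   isValidStatusCode(4000) // true  - application-defined range 3000-4999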
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ * @param {string|undefined} reason
+ */
+function failWebsocketConnection (ws, reason) {
+ const { [kController]: controller, [kResponse]: response } = ws
+
+ controller.abort()
+
+ if (response?.socket && !response.socket.destroyed) {
+ response.socket.destroy()
+ }
+
+ if (reason) {
+ fireEvent('error', ws, ErrorEvent, {
+ error: new Error(reason)
+ })
+ }
+}
+
+module.exports = {
+ isEstablished,
+ isClosing,
+ isClosed,
+ fireEvent,
+ isValidSubprotocol,
+ isValidStatusCode,
+ failWebsocketConnection,
+ websocketMessageReceived
+}
+
+
+/***/ }),
+
+/***/ 4284:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const { webidl } = __nccwpck_require__(1744)
+const { DOMException } = __nccwpck_require__(1037)
+const { URLSerializer } = __nccwpck_require__(685)
+const { getGlobalOrigin } = __nccwpck_require__(1246)
+const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(9188)
+const {
+ kWebSocketURL,
+ kReadyState,
+ kController,
+ kBinaryType,
+ kResponse,
+ kSentClose,
+ kByteParser
+} = __nccwpck_require__(7578)
+const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(5515)
+const { establishWebSocketConnection } = __nccwpck_require__(5354)
+const { WebsocketFrameSend } = __nccwpck_require__(5444)
+const { ByteParser } = __nccwpck_require__(1688)
+const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3983)
+const { getGlobalDispatcher } = __nccwpck_require__(1892)
+const { types } = __nccwpck_require__(3837)
+
+let experimentalWarned = false
+
+// https://websockets.spec.whatwg.org/#interface-definition
+class WebSocket extends EventTarget {
+ #events = {
+ open: null,
+ error: null,
+ close: null,
+ message: null
+ }
+
+ #bufferedAmount = 0
+ #protocol = ''
+ #extensions = ''
+
+ /**
+ * @param {string} url
+ * @param {string|string[]} protocols
+ */
+ constructor (url, protocols = []) {
+ super()
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' })
+
+ if (!experimentalWarned) {
+ experimentalWarned = true
+ process.emitWarning('WebSockets are experimental, expect them to change at any time.', {
+ code: 'UNDICI-WS'
+ })
+ }
+
+    const options = webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'](protocols)
+
+ url = webidl.converters.USVString(url)
+ protocols = options.protocols
+
+ // 1. Let baseURL be this's relevant settings object's API base URL.
+ const baseURL = getGlobalOrigin()
+
+ // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.
+ let urlRecord
+
+ try {
+ urlRecord = new URL(url, baseURL)
+ } catch (e) {
+ // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException.
+ throw new DOMException(e, 'SyntaxError')
+ }
+
+ // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws".
+ if (urlRecord.protocol === 'http:') {
+ urlRecord.protocol = 'ws:'
+ } else if (urlRecord.protocol === 'https:') {
+ // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss".
+ urlRecord.protocol = 'wss:'
+ }
+
+ // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException.
+ if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {
+ throw new DOMException(
+ `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,
+ 'SyntaxError'
+ )
+ }
+
+ // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError"
+ // DOMException.
+ if (urlRecord.hash || urlRecord.href.endsWith('#')) {
+ throw new DOMException('Got fragment', 'SyntaxError')
+ }
+
+ // 8. If protocols is a string, set protocols to a sequence consisting
+ // of just that string.
+ if (typeof protocols === 'string') {
+ protocols = [protocols]
+ }
+
+ // 9. If any of the values in protocols occur more than once or otherwise
+ // fail to match the requirements for elements that comprise the value
+ // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
+ // protocol, then throw a "SyntaxError" DOMException.
+ if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) {
+ throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+ }
+
+ if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) {
+ throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+ }
+
+ // 10. Set this's url to urlRecord.
+ this[kWebSocketURL] = new URL(urlRecord.href)
+
+ // 11. Let client be this's relevant settings object.
+
+ // 12. Run this step in parallel:
+
+ // 1. Establish a WebSocket connection given urlRecord, protocols,
+ // and client.
+ this[kController] = establishWebSocketConnection(
+ urlRecord,
+ protocols,
+ this,
+ (response) => this.#onConnectionEstablished(response),
+ options
+ )
+
+ // Each WebSocket object has an associated ready state, which is a
+ // number representing the state of the connection. Initially it must
+ // be CONNECTING (0).
+ this[kReadyState] = WebSocket.CONNECTING
+
+ // The extensions attribute must initially return the empty string.
+
+ // The protocol attribute must initially return the empty string.
+
+ // Each WebSocket object has an associated binary type, which is a
+ // BinaryType. Initially it must be "blob".
+ this[kBinaryType] = 'blob'
+ }
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#dom-websocket-close
+ * @param {number|undefined} code
+ * @param {string|undefined} reason
+ */
+ close (code = undefined, reason = undefined) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (code !== undefined) {
+ code = webidl.converters['unsigned short'](code, { clamp: true })
+ }
+
+ if (reason !== undefined) {
+ reason = webidl.converters.USVString(reason)
+ }
+
+ // 1. If code is present, but is neither an integer equal to 1000 nor an
+ // integer in the range 3000 to 4999, inclusive, throw an
+ // "InvalidAccessError" DOMException.
+ if (code !== undefined) {
+ if (code !== 1000 && (code < 3000 || code > 4999)) {
+ throw new DOMException('invalid code', 'InvalidAccessError')
+ }
+ }
+
+ let reasonByteLength = 0
+
+ // 2. If reason is present, then run these substeps:
+ if (reason !== undefined) {
+ // 1. Let reasonBytes be the result of encoding reason.
+ // 2. If reasonBytes is longer than 123 bytes, then throw a
+ // "SyntaxError" DOMException.
+ reasonByteLength = Buffer.byteLength(reason)
+
+ if (reasonByteLength > 123) {
+ throw new DOMException(
+ `Reason must not be greater than 123 bytes; received ${reasonByteLength}`,
+ 'SyntaxError'
+ )
+ }
+ }
+
+ // 3. Run the first matching steps from the following list:
+ if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {
+ // If this's ready state is CLOSING (2) or CLOSED (3)
+ // Do nothing.
+ } else if (!isEstablished(this)) {
+ // If the WebSocket connection is not yet established
+ // Fail the WebSocket connection and set this's ready state
+ // to CLOSING (2).
+ failWebsocketConnection(this, 'Connection was closed before it was established.')
+ this[kReadyState] = WebSocket.CLOSING
+ } else if (!isClosing(this)) {
+ // If the WebSocket closing handshake has not yet been started
+ // Start the WebSocket closing handshake and set this's ready
+ // state to CLOSING (2).
+ // - If neither code nor reason is present, the WebSocket Close
+ // message must not have a body.
+ // - If code is present, then the status code to use in the
+ // WebSocket Close message must be the integer given by code.
+ // - If reason is also present, then reasonBytes must be
+ // provided in the Close message after the status code.
+
+ const frame = new WebsocketFrameSend()
+
+ // If neither code nor reason is present, the WebSocket Close
+ // message must not have a body.
+
+ // If code is present, then the status code to use in the
+ // WebSocket Close message must be the integer given by code.
+ if (code !== undefined && reason === undefined) {
+ frame.frameData = Buffer.allocUnsafe(2)
+ frame.frameData.writeUInt16BE(code, 0)
+ } else if (code !== undefined && reason !== undefined) {
+ // If reason is also present, then reasonBytes must be
+ // provided in the Close message after the status code.
+ frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength)
+ frame.frameData.writeUInt16BE(code, 0)
+ // the body MAY contain UTF-8-encoded data with value /reason/
+ frame.frameData.write(reason, 2, 'utf-8')
+ } else {
+ frame.frameData = emptyBuffer
+ }
+
+ /** @type {import('stream').Duplex} */
+ const socket = this[kResponse].socket
+
+ socket.write(frame.createFrame(opcodes.CLOSE), (err) => {
+ if (!err) {
+ this[kSentClose] = true
+ }
+ })
+
+ // Upon either sending or receiving a Close control frame, it is said
+ // that _The WebSocket Closing Handshake is Started_ and that the
+ // WebSocket connection is in the CLOSING state.
+ this[kReadyState] = states.CLOSING
+ } else {
+ // Otherwise
+ // Set this's ready state to CLOSING (2).
+ this[kReadyState] = WebSocket.CLOSING
+ }
+ }
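+
+ // Editor's illustrative sketch, not part of the bundled undici source: per the
+ // validation above, only code 1000 or a code in the range 3000-4999 is accepted,
+ // and the UTF-8 encoded reason may be at most 123 bytes.
+ //
+ //   ws.close(1000, 'normal closure')   // ok
+ //   ws.close(3001, 'app specific')     // ok
+ //   ws.close(1002)                     // throws "InvalidAccessError"
+ //   ws.close(1000, 'x'.repeat(124))    // throws "SyntaxError" (reason longer than 123 bytes)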
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#dom-websocket-send
+ * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data
+ */
+ send (data) {
+ webidl.brandCheck(this, WebSocket)
+
+ webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' })
+
+ data = webidl.converters.WebSocketSendData(data)
+
+ // 1. If this's ready state is CONNECTING, then throw an
+ // "InvalidStateError" DOMException.
+ if (this[kReadyState] === WebSocket.CONNECTING) {
+ throw new DOMException('Sent before connected.', 'InvalidStateError')
+ }
+
+ // 2. Run the appropriate set of steps from the following list:
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
+
+ if (!isEstablished(this) || isClosing(this)) {
+ return
+ }
+
+ /** @type {import('stream').Duplex} */
+ const socket = this[kResponse].socket
+
+ // If data is a string
+ if (typeof data === 'string') {
+ // If the WebSocket connection is established and the WebSocket
+ // closing handshake has not yet started, then the user agent
+ // must send a WebSocket Message comprised of the data argument
+ // using a text frame opcode; if the data cannot be sent, e.g.
+ // because it would need to be buffered but the buffer is full,
+ // the user agent must flag the WebSocket as full and then close
+ // the WebSocket connection. Any invocation of this method with a
+ // string argument that does not throw an exception must increase
+ // the bufferedAmount attribute by the number of bytes needed to
+ // express the argument as UTF-8.
+
+ const value = Buffer.from(data)
+ const frame = new WebsocketFrameSend(value)
+ const buffer = frame.createFrame(opcodes.TEXT)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ } else if (types.isArrayBuffer(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need
+ // to be buffered but the buffer is full, the user agent must flag
+ // the WebSocket as full and then close the WebSocket connection.
+ // The data to be sent is the data stored in the buffer described
+ // by the ArrayBuffer object. Any invocation of this method with an
+ // ArrayBuffer argument that does not throw an exception must
+ // increase the bufferedAmount attribute by the length of the
+ // ArrayBuffer in bytes.
+
+ const value = Buffer.from(data)
+ const frame = new WebsocketFrameSend(value)
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ } else if (ArrayBuffer.isView(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need to
+ // be buffered but the buffer is full, the user agent must flag the
+ // WebSocket as full and then close the WebSocket connection. The
+ // data to be sent is the data stored in the section of the buffer
+ // described by the ArrayBuffer object that data references. Any
+ // invocation of this method with this kind of argument that does
+ // not throw an exception must increase the bufferedAmount attribute
+ // by the length of data’s buffer in bytes.
+
+ const ab = Buffer.from(data, data.byteOffset, data.byteLength)
+
+ const frame = new WebsocketFrameSend(ab)
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += ab.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= ab.byteLength
+ })
+ } else if (isBlobLike(data)) {
+ // If the WebSocket connection is established, and the WebSocket
+ // closing handshake has not yet started, then the user agent must
+ // send a WebSocket Message comprised of data using a binary frame
+ // opcode; if the data cannot be sent, e.g. because it would need to
+ // be buffered but the buffer is full, the user agent must flag the
+ // WebSocket as full and then close the WebSocket connection. The data
+ // to be sent is the raw data represented by the Blob object. Any
+ // invocation of this method with a Blob argument that does not throw
+ // an exception must increase the bufferedAmount attribute by the size
+ // of the Blob object’s raw data, in bytes.
+
+ const frame = new WebsocketFrameSend()
+
+ data.arrayBuffer().then((ab) => {
+ const value = Buffer.from(ab)
+ frame.frameData = value
+ const buffer = frame.createFrame(opcodes.BINARY)
+
+ this.#bufferedAmount += value.byteLength
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength
+ })
+ })
+ }
+ }
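+
+ // Editor's illustrative sketch, not part of the bundled undici source: once the
+ // connection is established, each branch above handles one payload shape, and
+ // bufferedAmount grows until the corresponding socket write completes.
+ //
+ //   ws.send('hello')                            // string      -> TEXT frame
+ //   ws.send(new Uint8Array([1, 2, 3]).buffer)   // ArrayBuffer -> BINARY frame
+ //   ws.send(new Uint8Array([1, 2, 3]))          // TypedArray  -> BINARY frame
+ //   ws.send(new Blob(['payload']))              // Blob        -> BINARY frame (read asynchronously)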
+
+ get readyState () {
+ webidl.brandCheck(this, WebSocket)
+
+ // The readyState getter steps are to return this's ready state.
+ return this[kReadyState]
+ }
+
+ get bufferedAmount () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#bufferedAmount
+ }
+
+ get url () {
+ webidl.brandCheck(this, WebSocket)
+
+ // The url getter steps are to return this's url, serialized.
+ return URLSerializer(this[kWebSocketURL])
+ }
+
+ get extensions () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#extensions
+ }
+
+ get protocol () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#protocol
+ }
+
+ get onopen () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.open
+ }
+
+ set onopen (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.open) {
+ this.removeEventListener('open', this.#events.open)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.open = fn
+ this.addEventListener('open', fn)
+ } else {
+ this.#events.open = null
+ }
+ }
+
+ get onerror () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.error
+ }
+
+ set onerror (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.error) {
+ this.removeEventListener('error', this.#events.error)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.error = fn
+ this.addEventListener('error', fn)
+ } else {
+ this.#events.error = null
+ }
+ }
+
+ get onclose () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.close
+ }
+
+ set onclose (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.close) {
+ this.removeEventListener('close', this.#events.close)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.close = fn
+ this.addEventListener('close', fn)
+ } else {
+ this.#events.close = null
+ }
+ }
+
+ get onmessage () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this.#events.message
+ }
+
+ set onmessage (fn) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (this.#events.message) {
+ this.removeEventListener('message', this.#events.message)
+ }
+
+ if (typeof fn === 'function') {
+ this.#events.message = fn
+ this.addEventListener('message', fn)
+ } else {
+ this.#events.message = null
+ }
+ }
+
+ get binaryType () {
+ webidl.brandCheck(this, WebSocket)
+
+ return this[kBinaryType]
+ }
+
+ set binaryType (type) {
+ webidl.brandCheck(this, WebSocket)
+
+ if (type !== 'blob' && type !== 'arraybuffer') {
+ this[kBinaryType] = 'blob'
+ } else {
+ this[kBinaryType] = type
+ }
+ }
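+
+ // Editor's illustrative note, not part of the bundled undici source: any value other
+ // than 'blob' or 'arraybuffer' is silently coerced to 'blob' by the setter above.
+ //
+ //   ws.binaryType = 'arraybuffer'   // incoming binary messages arrive as ArrayBuffer
+ //   ws.binaryType = 'nodebuffer'    // not a valid BinaryType; resets to 'blob'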
+
+ /**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ */
+ #onConnectionEstablished (response) {
+ // processResponse is called when the "response’s header list has been received and initialized."
+ // once this happens, the connection is open
+ this[kResponse] = response
+
+ const parser = new ByteParser(this)
+ parser.on('drain', function onParserDrain () {
+ this.ws[kResponse].socket.resume()
+ })
+
+ response.socket.ws = this
+ this[kByteParser] = parser
+
+ // 1. Change the ready state to OPEN (1).
+ this[kReadyState] = states.OPEN
+
+ // 2. Change the extensions attribute’s value to the extensions in use, if
+ // it is not the null value.
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
+ const extensions = response.headersList.get('sec-websocket-extensions')
+
+ if (extensions !== null) {
+ this.#extensions = extensions
+ }
+
+ // 3. Change the protocol attribute’s value to the subprotocol in use, if
+ // it is not the null value.
+ // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
+ const protocol = response.headersList.get('sec-websocket-protocol')
+
+ if (protocol !== null) {
+ this.#protocol = protocol
+ }
+
+ // 4. Fire an event named open at the WebSocket object.
+ fireEvent('open', this)
+ }
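+
+ // Editor's illustrative note, not part of the bundled undici source: after this
+ // callback runs, the negotiated values surface through the public getters.
+ //
+ //   ws.addEventListener('open', () => {
+ //     console.log(ws.readyState)   // 1 (OPEN)
+ //     console.log(ws.protocol)     // the subprotocol selected by the server, or ''
+ //     console.log(ws.extensions)   // the Sec-WebSocket-Extensions value, or ''
+ //   })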
+}
+
+// https://websockets.spec.whatwg.org/#dom-websocket-connecting
+WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING
+// https://websockets.spec.whatwg.org/#dom-websocket-open
+WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN
+// https://websockets.spec.whatwg.org/#dom-websocket-closing
+WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING
+// https://websockets.spec.whatwg.org/#dom-websocket-closed
+WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED
+
+Object.defineProperties(WebSocket.prototype, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors,
+ url: kEnumerableProperty,
+ readyState: kEnumerableProperty,
+ bufferedAmount: kEnumerableProperty,
+ onopen: kEnumerableProperty,
+ onerror: kEnumerableProperty,
+ onclose: kEnumerableProperty,
+ close: kEnumerableProperty,
+ onmessage: kEnumerableProperty,
+ binaryType: kEnumerableProperty,
+ send: kEnumerableProperty,
+ extensions: kEnumerableProperty,
+ protocol: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: 'WebSocket',
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+})
+
+Object.defineProperties(WebSocket, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors
+})
+
+webidl.converters['sequence<DOMString>'] = webidl.sequenceConverter(
+ webidl.converters.DOMString
+)
+
+webidl.converters['DOMString or sequence<DOMString>'] = function (V) {
+ if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) {
+ return webidl.converters['sequence<DOMString>'](V)
+ }
+
+ return webidl.converters.DOMString(V)
+}
+
+// This implements the proposal made in https://github.com/whatwg/websockets/issues/42
+webidl.converters.WebSocketInit = webidl.dictionaryConverter([
+ {
+ key: 'protocols',
+ converter: webidl.converters['DOMString or sequence<DOMString>'],
+ get defaultValue () {
+ return []
+ }
+ },
+ {
+ key: 'dispatcher',
+ converter: (V) => V,
+ get defaultValue () {
+ return getGlobalDispatcher()
+ }
+ },
+ {
+ key: 'headers',
+ converter: webidl.nullableConverter(webidl.converters.HeadersInit)
+ }
+])
+
+webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'] = function (V) {
+ if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {
+ return webidl.converters.WebSocketInit(V)
+ }
+
+ return { protocols: webidl.converters['DOMString or sequence<DOMString>'](V) }
+}
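+
+// Editor's illustrative note, not part of the bundled undici source: the converter
+// above resolves the overloaded second constructor argument roughly as follows.
+//
+//   new WebSocket(url, 'chat')                 // -> { protocols: 'chat' }, wrapped into ['chat'] by the constructor
+//   new WebSocket(url, ['chat', 'superchat'])  // -> { protocols: ['chat', 'superchat'] }
+//   new WebSocket(url, { protocols: ['chat'], headers: { cookie: 'a=b' } })   // handled by WebSocketInit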
+
+webidl.converters.WebSocketSendData = function (V) {
+ if (webidl.util.Type(V) === 'Object') {
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false })
+ }
+
+ if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {
+ return webidl.converters.BufferSource(V)
+ }
+ }
+
+ return webidl.converters.USVString(V)
+}
+
+module.exports = {
+ WebSocket
+}
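+
+// Editor's usage sketch, not part of the bundled undici source. The require path is
+// hypothetical; inside this bundle the class is resolved through __nccwpck_require__.
+//
+//   const { WebSocket } = require('./websocket')
+//   const ws = new WebSocket('wss://example.com', { protocols: ['chat'] })
+//   ws.onopen = () => ws.send('hello')
+//   ws.onmessage = (event) => console.log(event.data)
+//   ws.onclose = (event) => console.log(event.code, event.reason)
+//   ws.onerror = console.error
+//   setTimeout(() => ws.close(1000, 'done'), 10_000)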
+
+
/***/ }),
/***/ 5030:
@@ -29899,7 +53305,7 @@ function getUserAgent() {
return navigator.userAgent;
}
- if (typeof process === "object" && "version" in process) {
+ if (typeof process === "object" && process.version !== undefined) {
return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;
}
@@ -30556,1967 +53962,6 @@ function version(uuid) {
var _default = version;
exports["default"] = _default;
-/***/ }),
-
-/***/ 4886:
-/***/ ((module) => {
-
-"use strict";
-
-
-var conversions = {};
-module.exports = conversions;
-
-function sign(x) {
- return x < 0 ? -1 : 1;
-}
-
-function evenRound(x) {
- // Round x to the nearest integer, choosing the even integer if it lies halfway between two.
- if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)
- return Math.floor(x);
- } else {
- return Math.round(x);
- }
-}
-
-function createNumberConversion(bitLength, typeOpts) {
- if (!typeOpts.unsigned) {
- --bitLength;
- }
- const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
- const upperBound = Math.pow(2, bitLength) - 1;
-
- const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
- const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
-
- return function(V, opts) {
- if (!opts) opts = {};
-
- let x = +V;
-
- if (opts.enforceRange) {
- if (!Number.isFinite(x)) {
- throw new TypeError("Argument is not a finite number");
- }
-
- x = sign(x) * Math.floor(Math.abs(x));
- if (x < lowerBound || x > upperBound) {
- throw new TypeError("Argument is not in byte range");
- }
-
- return x;
- }
-
- if (!isNaN(x) && opts.clamp) {
- x = evenRound(x);
-
- if (x < lowerBound) x = lowerBound;
- if (x > upperBound) x = upperBound;
- return x;
- }
-
- if (!Number.isFinite(x) || x === 0) {
- return 0;
- }
-
- x = sign(x) * Math.floor(Math.abs(x));
- x = x % moduloVal;
-
- if (!typeOpts.unsigned && x >= moduloBound) {
- return x - moduloVal;
- } else if (typeOpts.unsigned) {
- if (x < 0) {
- x += moduloVal;
- } else if (x === -0) { // don't return negative zero
- return 0;
- }
- }
-
- return x;
- }
-}
-
-conversions["void"] = function () {
- return undefined;
-};
-
-conversions["boolean"] = function (val) {
- return !!val;
-};
-
-conversions["byte"] = createNumberConversion(8, { unsigned: false });
-conversions["octet"] = createNumberConversion(8, { unsigned: true });
-
-conversions["short"] = createNumberConversion(16, { unsigned: false });
-conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
-
-conversions["long"] = createNumberConversion(32, { unsigned: false });
-conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
-
-conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
-conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
-
-conversions["double"] = function (V) {
- const x = +V;
-
- if (!Number.isFinite(x)) {
- throw new TypeError("Argument is not a finite floating-point value");
- }
-
- return x;
-};
-
-conversions["unrestricted double"] = function (V) {
- const x = +V;
-
- if (isNaN(x)) {
- throw new TypeError("Argument is NaN");
- }
-
- return x;
-};
-
-// not quite valid, but good enough for JS
-conversions["float"] = conversions["double"];
-conversions["unrestricted float"] = conversions["unrestricted double"];
-
-conversions["DOMString"] = function (V, opts) {
- if (!opts) opts = {};
-
- if (opts.treatNullAsEmptyString && V === null) {
- return "";
- }
-
- return String(V);
-};
-
-conversions["ByteString"] = function (V, opts) {
- const x = String(V);
- let c = undefined;
- for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
- if (c > 255) {
- throw new TypeError("Argument is not a valid bytestring");
- }
- }
-
- return x;
-};
-
-conversions["USVString"] = function (V) {
- const S = String(V);
- const n = S.length;
- const U = [];
- for (let i = 0; i < n; ++i) {
- const c = S.charCodeAt(i);
- if (c < 0xD800 || c > 0xDFFF) {
- U.push(String.fromCodePoint(c));
- } else if (0xDC00 <= c && c <= 0xDFFF) {
- U.push(String.fromCodePoint(0xFFFD));
- } else {
- if (i === n - 1) {
- U.push(String.fromCodePoint(0xFFFD));
- } else {
- const d = S.charCodeAt(i + 1);
- if (0xDC00 <= d && d <= 0xDFFF) {
- const a = c & 0x3FF;
- const b = d & 0x3FF;
- U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
- ++i;
- } else {
- U.push(String.fromCodePoint(0xFFFD));
- }
- }
- }
- }
-
- return U.join('');
-};
-
-conversions["Date"] = function (V, opts) {
- if (!(V instanceof Date)) {
- throw new TypeError("Argument is not a Date object");
- }
- if (isNaN(V)) {
- return undefined;
- }
-
- return V;
-};
-
-conversions["RegExp"] = function (V, opts) {
- if (!(V instanceof RegExp)) {
- V = new RegExp(V);
- }
-
- return V;
-};
-
-
-/***/ }),
-
-/***/ 7537:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-const usm = __nccwpck_require__(2158);
-
-exports.implementation = class URLImpl {
- constructor(constructorArgs) {
- const url = constructorArgs[0];
- const base = constructorArgs[1];
-
- let parsedBase = null;
- if (base !== undefined) {
- parsedBase = usm.basicURLParse(base);
- if (parsedBase === "failure") {
- throw new TypeError("Invalid base URL");
- }
- }
-
- const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
- if (parsedURL === "failure") {
- throw new TypeError("Invalid URL");
- }
-
- this._url = parsedURL;
-
- // TODO: query stuff
- }
-
- get href() {
- return usm.serializeURL(this._url);
- }
-
- set href(v) {
- const parsedURL = usm.basicURLParse(v);
- if (parsedURL === "failure") {
- throw new TypeError("Invalid URL");
- }
-
- this._url = parsedURL;
- }
-
- get origin() {
- return usm.serializeURLOrigin(this._url);
- }
-
- get protocol() {
- return this._url.scheme + ":";
- }
-
- set protocol(v) {
- usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
- }
-
- get username() {
- return this._url.username;
- }
-
- set username(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- usm.setTheUsername(this._url, v);
- }
-
- get password() {
- return this._url.password;
- }
-
- set password(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- usm.setThePassword(this._url, v);
- }
-
- get host() {
- const url = this._url;
-
- if (url.host === null) {
- return "";
- }
-
- if (url.port === null) {
- return usm.serializeHost(url.host);
- }
-
- return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
- }
-
- set host(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
- }
-
- get hostname() {
- if (this._url.host === null) {
- return "";
- }
-
- return usm.serializeHost(this._url.host);
- }
-
- set hostname(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
- }
-
- get port() {
- if (this._url.port === null) {
- return "";
- }
-
- return usm.serializeInteger(this._url.port);
- }
-
- set port(v) {
- if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
- return;
- }
-
- if (v === "") {
- this._url.port = null;
- } else {
- usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
- }
- }
-
- get pathname() {
- if (this._url.cannotBeABaseURL) {
- return this._url.path[0];
- }
-
- if (this._url.path.length === 0) {
- return "";
- }
-
- return "/" + this._url.path.join("/");
- }
-
- set pathname(v) {
- if (this._url.cannotBeABaseURL) {
- return;
- }
-
- this._url.path = [];
- usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
- }
-
- get search() {
- if (this._url.query === null || this._url.query === "") {
- return "";
- }
-
- return "?" + this._url.query;
- }
-
- set search(v) {
- // TODO: query stuff
-
- const url = this._url;
-
- if (v === "") {
- url.query = null;
- return;
- }
-
- const input = v[0] === "?" ? v.substring(1) : v;
- url.query = "";
- usm.basicURLParse(input, { url, stateOverride: "query" });
- }
-
- get hash() {
- if (this._url.fragment === null || this._url.fragment === "") {
- return "";
- }
-
- return "#" + this._url.fragment;
- }
-
- set hash(v) {
- if (v === "") {
- this._url.fragment = null;
- return;
- }
-
- const input = v[0] === "#" ? v.substring(1) : v;
- this._url.fragment = "";
- usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
- }
-
- toJSON() {
- return this.href;
- }
-};
-
-
-/***/ }),
-
-/***/ 3394:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-const conversions = __nccwpck_require__(4886);
-const utils = __nccwpck_require__(3185);
-const Impl = __nccwpck_require__(7537);
-
-const impl = utils.implSymbol;
-
-function URL(url) {
- if (!this || this[impl] || !(this instanceof URL)) {
- throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
- }
- if (arguments.length < 1) {
- throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
- }
- const args = [];
- for (let i = 0; i < arguments.length && i < 2; ++i) {
- args[i] = arguments[i];
- }
- args[0] = conversions["USVString"](args[0]);
- if (args[1] !== undefined) {
- args[1] = conversions["USVString"](args[1]);
- }
-
- module.exports.setup(this, args);
-}
-
-URL.prototype.toJSON = function toJSON() {
- if (!this || !module.exports.is(this)) {
- throw new TypeError("Illegal invocation");
- }
- const args = [];
- for (let i = 0; i < arguments.length && i < 0; ++i) {
- args[i] = arguments[i];
- }
- return this[impl].toJSON.apply(this[impl], args);
-};
-Object.defineProperty(URL.prototype, "href", {
- get() {
- return this[impl].href;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].href = V;
- },
- enumerable: true,
- configurable: true
-});
-
-URL.prototype.toString = function () {
- if (!this || !module.exports.is(this)) {
- throw new TypeError("Illegal invocation");
- }
- return this.href;
-};
-
-Object.defineProperty(URL.prototype, "origin", {
- get() {
- return this[impl].origin;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "protocol", {
- get() {
- return this[impl].protocol;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].protocol = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "username", {
- get() {
- return this[impl].username;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].username = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "password", {
- get() {
- return this[impl].password;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].password = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "host", {
- get() {
- return this[impl].host;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].host = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "hostname", {
- get() {
- return this[impl].hostname;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].hostname = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "port", {
- get() {
- return this[impl].port;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].port = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "pathname", {
- get() {
- return this[impl].pathname;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].pathname = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "search", {
- get() {
- return this[impl].search;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].search = V;
- },
- enumerable: true,
- configurable: true
-});
-
-Object.defineProperty(URL.prototype, "hash", {
- get() {
- return this[impl].hash;
- },
- set(V) {
- V = conversions["USVString"](V);
- this[impl].hash = V;
- },
- enumerable: true,
- configurable: true
-});
-
-
-module.exports = {
- is(obj) {
- return !!obj && obj[impl] instanceof Impl.implementation;
- },
- create(constructorArgs, privateData) {
- let obj = Object.create(URL.prototype);
- this.setup(obj, constructorArgs, privateData);
- return obj;
- },
- setup(obj, constructorArgs, privateData) {
- if (!privateData) privateData = {};
- privateData.wrapper = obj;
-
- obj[impl] = new Impl.implementation(constructorArgs, privateData);
- obj[impl][utils.wrapperSymbol] = obj;
- },
- interface: URL,
- expose: {
- Window: { URL: URL },
- Worker: { URL: URL }
- }
-};
-
-
-
-/***/ }),
-
-/***/ 8665:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-exports.URL = __nccwpck_require__(3394)["interface"];
-exports.serializeURL = __nccwpck_require__(2158).serializeURL;
-exports.serializeURLOrigin = __nccwpck_require__(2158).serializeURLOrigin;
-exports.basicURLParse = __nccwpck_require__(2158).basicURLParse;
-exports.setTheUsername = __nccwpck_require__(2158).setTheUsername;
-exports.setThePassword = __nccwpck_require__(2158).setThePassword;
-exports.serializeHost = __nccwpck_require__(2158).serializeHost;
-exports.serializeInteger = __nccwpck_require__(2158).serializeInteger;
-exports.parseURL = __nccwpck_require__(2158).parseURL;
-
-
-/***/ }),
-
-/***/ 2158:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-"use strict";
-
-const punycode = __nccwpck_require__(5477);
-const tr46 = __nccwpck_require__(4256);
-
-const specialSchemes = {
- ftp: 21,
- file: null,
- gopher: 70,
- http: 80,
- https: 443,
- ws: 80,
- wss: 443
-};
-
-const failure = Symbol("failure");
-
-function countSymbols(str) {
- return punycode.ucs2.decode(str).length;
-}
-
-function at(input, idx) {
- const c = input[idx];
- return isNaN(c) ? undefined : String.fromCodePoint(c);
-}
-
-function isASCIIDigit(c) {
- return c >= 0x30 && c <= 0x39;
-}
-
-function isASCIIAlpha(c) {
- return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);
-}
-
-function isASCIIAlphanumeric(c) {
- return isASCIIAlpha(c) || isASCIIDigit(c);
-}
-
-function isASCIIHex(c) {
- return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);
-}
-
-function isSingleDot(buffer) {
- return buffer === "." || buffer.toLowerCase() === "%2e";
-}
-
-function isDoubleDot(buffer) {
- buffer = buffer.toLowerCase();
- return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e";
-}
-
-function isWindowsDriveLetterCodePoints(cp1, cp2) {
- return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);
-}
-
-function isWindowsDriveLetterString(string) {
- return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|");
-}
-
-function isNormalizedWindowsDriveLetterString(string) {
- return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":";
-}
-
-function containsForbiddenHostCodePoint(string) {
- return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1;
-}
-
-function containsForbiddenHostCodePointExcludingPercent(string) {
- return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1;
-}
-
-function isSpecialScheme(scheme) {
- return specialSchemes[scheme] !== undefined;
-}
-
-function isSpecial(url) {
- return isSpecialScheme(url.scheme);
-}
-
-function defaultPort(scheme) {
- return specialSchemes[scheme];
-}
-
-function percentEncode(c) {
- let hex = c.toString(16).toUpperCase();
- if (hex.length === 1) {
- hex = "0" + hex;
- }
-
- return "%" + hex;
-}
-
-function utf8PercentEncode(c) {
- const buf = new Buffer(c);
-
- let str = "";
-
- for (let i = 0; i < buf.length; ++i) {
- str += percentEncode(buf[i]);
- }
-
- return str;
-}
-
-function utf8PercentDecode(str) {
- const input = new Buffer(str);
- const output = [];
- for (let i = 0; i < input.length; ++i) {
- if (input[i] !== 37) {
- output.push(input[i]);
- } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {
- output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));
- i += 2;
- } else {
- output.push(input[i]);
- }
- }
- return new Buffer(output).toString();
-}
-
-function isC0ControlPercentEncode(c) {
- return c <= 0x1F || c > 0x7E;
-}
-
-const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);
-function isPathPercentEncode(c) {
- return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);
-}
-
-const extraUserinfoPercentEncodeSet =
- new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);
-function isUserinfoPercentEncode(c) {
- return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);
-}
-
-function percentEncodeChar(c, encodeSetPredicate) {
- const cStr = String.fromCodePoint(c);
-
- if (encodeSetPredicate(c)) {
- return utf8PercentEncode(cStr);
- }
-
- return cStr;
-}
-
-function parseIPv4Number(input) {
- let R = 10;
-
- if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") {
- input = input.substring(2);
- R = 16;
- } else if (input.length >= 2 && input.charAt(0) === "0") {
- input = input.substring(1);
- R = 8;
- }
-
- if (input === "") {
- return 0;
- }
-
- const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);
- if (regex.test(input)) {
- return failure;
- }
-
- return parseInt(input, R);
-}
-
-function parseIPv4(input) {
- const parts = input.split(".");
- if (parts[parts.length - 1] === "") {
- if (parts.length > 1) {
- parts.pop();
- }
- }
-
- if (parts.length > 4) {
- return input;
- }
-
- const numbers = [];
- for (const part of parts) {
- if (part === "") {
- return input;
- }
- const n = parseIPv4Number(part);
- if (n === failure) {
- return input;
- }
-
- numbers.push(n);
- }
-
- for (let i = 0; i < numbers.length - 1; ++i) {
- if (numbers[i] > 255) {
- return failure;
- }
- }
- if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {
- return failure;
- }
-
- let ipv4 = numbers.pop();
- let counter = 0;
-
- for (const n of numbers) {
- ipv4 += n * Math.pow(256, 3 - counter);
- ++counter;
- }
-
- return ipv4;
-}
-
-function serializeIPv4(address) {
- let output = "";
- let n = address;
-
- for (let i = 1; i <= 4; ++i) {
- output = String(n % 256) + output;
- if (i !== 4) {
- output = "." + output;
- }
- n = Math.floor(n / 256);
- }
-
- return output;
-}
-
-function parseIPv6(input) {
- const address = [0, 0, 0, 0, 0, 0, 0, 0];
- let pieceIndex = 0;
- let compress = null;
- let pointer = 0;
-
- input = punycode.ucs2.decode(input);
-
- if (input[pointer] === 58) {
- if (input[pointer + 1] !== 58) {
- return failure;
- }
-
- pointer += 2;
- ++pieceIndex;
- compress = pieceIndex;
- }
-
- while (pointer < input.length) {
- if (pieceIndex === 8) {
- return failure;
- }
-
- if (input[pointer] === 58) {
- if (compress !== null) {
- return failure;
- }
- ++pointer;
- ++pieceIndex;
- compress = pieceIndex;
- continue;
- }
-
- let value = 0;
- let length = 0;
-
- while (length < 4 && isASCIIHex(input[pointer])) {
- value = value * 0x10 + parseInt(at(input, pointer), 16);
- ++pointer;
- ++length;
- }
-
- if (input[pointer] === 46) {
- if (length === 0) {
- return failure;
- }
-
- pointer -= length;
-
- if (pieceIndex > 6) {
- return failure;
- }
-
- let numbersSeen = 0;
-
- while (input[pointer] !== undefined) {
- let ipv4Piece = null;
-
- if (numbersSeen > 0) {
- if (input[pointer] === 46 && numbersSeen < 4) {
- ++pointer;
- } else {
- return failure;
- }
- }
-
- if (!isASCIIDigit(input[pointer])) {
- return failure;
- }
-
- while (isASCIIDigit(input[pointer])) {
- const number = parseInt(at(input, pointer));
- if (ipv4Piece === null) {
- ipv4Piece = number;
- } else if (ipv4Piece === 0) {
- return failure;
- } else {
- ipv4Piece = ipv4Piece * 10 + number;
- }
- if (ipv4Piece > 255) {
- return failure;
- }
- ++pointer;
- }
-
- address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;
-
- ++numbersSeen;
-
- if (numbersSeen === 2 || numbersSeen === 4) {
- ++pieceIndex;
- }
- }
-
- if (numbersSeen !== 4) {
- return failure;
- }
-
- break;
- } else if (input[pointer] === 58) {
- ++pointer;
- if (input[pointer] === undefined) {
- return failure;
- }
- } else if (input[pointer] !== undefined) {
- return failure;
- }
-
- address[pieceIndex] = value;
- ++pieceIndex;
- }
-
- if (compress !== null) {
- let swaps = pieceIndex - compress;
- pieceIndex = 7;
- while (pieceIndex !== 0 && swaps > 0) {
- const temp = address[compress + swaps - 1];
- address[compress + swaps - 1] = address[pieceIndex];
- address[pieceIndex] = temp;
- --pieceIndex;
- --swaps;
- }
- } else if (compress === null && pieceIndex !== 8) {
- return failure;
- }
-
- return address;
-}
-
-function serializeIPv6(address) {
- let output = "";
- const seqResult = findLongestZeroSequence(address);
- const compress = seqResult.idx;
- let ignore0 = false;
-
- for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {
- if (ignore0 && address[pieceIndex] === 0) {
- continue;
- } else if (ignore0) {
- ignore0 = false;
- }
-
- if (compress === pieceIndex) {
- const separator = pieceIndex === 0 ? "::" : ":";
- output += separator;
- ignore0 = true;
- continue;
- }
-
- output += address[pieceIndex].toString(16);
-
- if (pieceIndex !== 7) {
- output += ":";
- }
- }
-
- return output;
-}
-
-function parseHost(input, isSpecialArg) {
- if (input[0] === "[") {
- if (input[input.length - 1] !== "]") {
- return failure;
- }
-
- return parseIPv6(input.substring(1, input.length - 1));
- }
-
- if (!isSpecialArg) {
- return parseOpaqueHost(input);
- }
-
- const domain = utf8PercentDecode(input);
- const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
- if (asciiDomain === null) {
- return failure;
- }
-
- if (containsForbiddenHostCodePoint(asciiDomain)) {
- return failure;
- }
-
- const ipv4Host = parseIPv4(asciiDomain);
- if (typeof ipv4Host === "number" || ipv4Host === failure) {
- return ipv4Host;
- }
-
- return asciiDomain;
-}
-
-function parseOpaqueHost(input) {
- if (containsForbiddenHostCodePointExcludingPercent(input)) {
- return failure;
- }
-
- let output = "";
- const decoded = punycode.ucs2.decode(input);
- for (let i = 0; i < decoded.length; ++i) {
- output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);
- }
- return output;
-}
-
-function findLongestZeroSequence(arr) {
- let maxIdx = null;
- let maxLen = 1; // only find elements > 1
- let currStart = null;
- let currLen = 0;
-
- for (let i = 0; i < arr.length; ++i) {
- if (arr[i] !== 0) {
- if (currLen > maxLen) {
- maxIdx = currStart;
- maxLen = currLen;
- }
-
- currStart = null;
- currLen = 0;
- } else {
- if (currStart === null) {
- currStart = i;
- }
- ++currLen;
- }
- }
-
- // if trailing zeros
- if (currLen > maxLen) {
- maxIdx = currStart;
- maxLen = currLen;
- }
-
- return {
- idx: maxIdx,
- len: maxLen
- };
-}
-
-function serializeHost(host) {
- if (typeof host === "number") {
- return serializeIPv4(host);
- }
-
- // IPv6 serializer
- if (host instanceof Array) {
- return "[" + serializeIPv6(host) + "]";
- }
-
- return host;
-}
-
-function trimControlChars(url) {
- return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, "");
-}
-
-function trimTabAndNewline(url) {
- return url.replace(/\u0009|\u000A|\u000D/g, "");
-}
-
-function shortenPath(url) {
- const path = url.path;
- if (path.length === 0) {
- return;
- }
- if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {
- return;
- }
-
- path.pop();
-}
-
-function includesCredentials(url) {
- return url.username !== "" || url.password !== "";
-}
-
-function cannotHaveAUsernamePasswordPort(url) {
- return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file";
-}
-
-function isNormalizedWindowsDriveLetter(string) {
- return /^[A-Za-z]:$/.test(string);
-}
-
-function URLStateMachine(input, base, encodingOverride, url, stateOverride) {
- this.pointer = 0;
- this.input = input;
- this.base = base || null;
- this.encodingOverride = encodingOverride || "utf-8";
- this.stateOverride = stateOverride;
- this.url = url;
- this.failure = false;
- this.parseError = false;
-
- if (!this.url) {
- this.url = {
- scheme: "",
- username: "",
- password: "",
- host: null,
- port: null,
- path: [],
- query: null,
- fragment: null,
-
- cannotBeABaseURL: false
- };
-
- const res = trimControlChars(this.input);
- if (res !== this.input) {
- this.parseError = true;
- }
- this.input = res;
- }
-
- const res = trimTabAndNewline(this.input);
- if (res !== this.input) {
- this.parseError = true;
- }
- this.input = res;
-
- this.state = stateOverride || "scheme start";
-
- this.buffer = "";
- this.atFlag = false;
- this.arrFlag = false;
- this.passwordTokenSeenFlag = false;
-
- this.input = punycode.ucs2.decode(this.input);
-
- for (; this.pointer <= this.input.length; ++this.pointer) {
- const c = this.input[this.pointer];
- const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);
-
- // exec state machine
- const ret = this["parse " + this.state](c, cStr);
- if (!ret) {
- break; // terminate algorithm
- } else if (ret === failure) {
- this.failure = true;
- break;
- }
- }
-}
-
-URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) {
- if (isASCIIAlpha(c)) {
- this.buffer += cStr.toLowerCase();
- this.state = "scheme";
- } else if (!this.stateOverride) {
- this.state = "no scheme";
- --this.pointer;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) {
- if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {
- this.buffer += cStr.toLowerCase();
- } else if (c === 58) {
- if (this.stateOverride) {
- if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {
- return false;
- }
-
- if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {
- return false;
- }
-
- if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") {
- return false;
- }
-
- if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) {
- return false;
- }
- }
- this.url.scheme = this.buffer;
- this.buffer = "";
- if (this.stateOverride) {
- return false;
- }
- if (this.url.scheme === "file") {
- if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {
- this.parseError = true;
- }
- this.state = "file";
- } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {
- this.state = "special relative or authority";
- } else if (isSpecial(this.url)) {
- this.state = "special authority slashes";
- } else if (this.input[this.pointer + 1] === 47) {
- this.state = "path or authority";
- ++this.pointer;
- } else {
- this.url.cannotBeABaseURL = true;
- this.url.path.push("");
- this.state = "cannot-be-a-base-URL path";
- }
- } else if (!this.stateOverride) {
- this.buffer = "";
- this.state = "no scheme";
- this.pointer = -1;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) {
- if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {
- return failure;
- } else if (this.base.cannotBeABaseURL && c === 35) {
- this.url.scheme = this.base.scheme;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.url.cannotBeABaseURL = true;
- this.state = "fragment";
- } else if (this.base.scheme === "file") {
- this.state = "file";
- --this.pointer;
- } else {
- this.state = "relative";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) {
- if (c === 47 && this.input[this.pointer + 1] === 47) {
- this.state = "special authority ignore slashes";
- ++this.pointer;
- } else {
- this.parseError = true;
- this.state = "relative";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) {
- if (c === 47) {
- this.state = "authority";
- } else {
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse relative"] = function parseRelative(c) {
- this.url.scheme = this.base.scheme;
- if (isNaN(c)) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- } else if (c === 47) {
- this.state = "relative slash";
- } else if (c === 63) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.state = "fragment";
- } else if (isSpecial(this.url) && c === 92) {
- this.parseError = true;
- this.state = "relative slash";
- } else {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.url.path = this.base.path.slice(0, this.base.path.length - 1);
-
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) {
- if (isSpecial(this.url) && (c === 47 || c === 92)) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "special authority ignore slashes";
- } else if (c === 47) {
- this.state = "authority";
- } else {
- this.url.username = this.base.username;
- this.url.password = this.base.password;
- this.url.host = this.base.host;
- this.url.port = this.base.port;
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) {
- if (c === 47 && this.input[this.pointer + 1] === 47) {
- this.state = "special authority ignore slashes";
- ++this.pointer;
- } else {
- this.parseError = true;
- this.state = "special authority ignore slashes";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) {
- if (c !== 47 && c !== 92) {
- this.state = "authority";
- --this.pointer;
- } else {
- this.parseError = true;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) {
- if (c === 64) {
- this.parseError = true;
- if (this.atFlag) {
- this.buffer = "%40" + this.buffer;
- }
- this.atFlag = true;
-
- // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars
- const len = countSymbols(this.buffer);
- for (let pointer = 0; pointer < len; ++pointer) {
- const codePoint = this.buffer.codePointAt(pointer);
-
- if (codePoint === 58 && !this.passwordTokenSeenFlag) {
- this.passwordTokenSeenFlag = true;
- continue;
- }
- const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);
- if (this.passwordTokenSeenFlag) {
- this.url.password += encodedCodePoints;
- } else {
- this.url.username += encodedCodePoints;
- }
- }
- this.buffer = "";
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92)) {
- if (this.atFlag && this.buffer === "") {
- this.parseError = true;
- return failure;
- }
- this.pointer -= countSymbols(this.buffer) + 1;
- this.buffer = "";
- this.state = "host";
- } else {
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse hostname"] =
-URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) {
- if (this.stateOverride && this.url.scheme === "file") {
- --this.pointer;
- this.state = "file host";
- } else if (c === 58 && !this.arrFlag) {
- if (this.buffer === "") {
- this.parseError = true;
- return failure;
- }
-
- const host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
-
- this.url.host = host;
- this.buffer = "";
- this.state = "port";
- if (this.stateOverride === "hostname") {
- return false;
- }
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92)) {
- --this.pointer;
- if (isSpecial(this.url) && this.buffer === "") {
- this.parseError = true;
- return failure;
- } else if (this.stateOverride && this.buffer === "" &&
- (includesCredentials(this.url) || this.url.port !== null)) {
- this.parseError = true;
- return false;
- }
-
- const host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
-
- this.url.host = host;
- this.buffer = "";
- this.state = "path start";
- if (this.stateOverride) {
- return false;
- }
- } else {
- if (c === 91) {
- this.arrFlag = true;
- } else if (c === 93) {
- this.arrFlag = false;
- }
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) {
- if (isASCIIDigit(c)) {
- this.buffer += cStr;
- } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
- (isSpecial(this.url) && c === 92) ||
- this.stateOverride) {
- if (this.buffer !== "") {
- const port = parseInt(this.buffer);
- if (port > Math.pow(2, 16) - 1) {
- this.parseError = true;
- return failure;
- }
- this.url.port = port === defaultPort(this.url.scheme) ? null : port;
- this.buffer = "";
- }
- if (this.stateOverride) {
- return false;
- }
- this.state = "path start";
- --this.pointer;
- } else {
- this.parseError = true;
- return failure;
- }
-
- return true;
-};
-
-const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);
-
-URLStateMachine.prototype["parse file"] = function parseFile(c) {
- this.url.scheme = "file";
-
- if (c === 47 || c === 92) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "file slash";
- } else if (this.base !== null && this.base.scheme === "file") {
- if (isNaN(c)) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- } else if (c === 63) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- this.url.query = this.base.query;
- this.url.fragment = "";
- this.state = "fragment";
- } else {
- if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points
- !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||
- (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points
- !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {
- this.url.host = this.base.host;
- this.url.path = this.base.path.slice();
- shortenPath(this.url);
- } else {
- this.parseError = true;
- }
-
- this.state = "path";
- --this.pointer;
- }
- } else {
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) {
- if (c === 47 || c === 92) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "file host";
- } else {
- if (this.base !== null && this.base.scheme === "file") {
- if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {
- this.url.path.push(this.base.path[0]);
- } else {
- this.url.host = this.base.host;
- }
- }
- this.state = "path";
- --this.pointer;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) {
- if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {
- --this.pointer;
- if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {
- this.parseError = true;
- this.state = "path";
- } else if (this.buffer === "") {
- this.url.host = "";
- if (this.stateOverride) {
- return false;
- }
- this.state = "path start";
- } else {
- let host = parseHost(this.buffer, isSpecial(this.url));
- if (host === failure) {
- return failure;
- }
- if (host === "localhost") {
- host = "";
- }
- this.url.host = host;
-
- if (this.stateOverride) {
- return false;
- }
-
- this.buffer = "";
- this.state = "path start";
- }
- } else {
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path start"] = function parsePathStart(c) {
- if (isSpecial(this.url)) {
- if (c === 92) {
- this.parseError = true;
- }
- this.state = "path";
-
- if (c !== 47 && c !== 92) {
- --this.pointer;
- }
- } else if (!this.stateOverride && c === 63) {
- this.url.query = "";
- this.state = "query";
- } else if (!this.stateOverride && c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- } else if (c !== undefined) {
- this.state = "path";
- if (c !== 47) {
- --this.pointer;
- }
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse path"] = function parsePath(c) {
- if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||
- (!this.stateOverride && (c === 63 || c === 35))) {
- if (isSpecial(this.url) && c === 92) {
- this.parseError = true;
- }
-
- if (isDoubleDot(this.buffer)) {
- shortenPath(this.url);
- if (c !== 47 && !(isSpecial(this.url) && c === 92)) {
- this.url.path.push("");
- }
- } else if (isSingleDot(this.buffer) && c !== 47 &&
- !(isSpecial(this.url) && c === 92)) {
- this.url.path.push("");
- } else if (!isSingleDot(this.buffer)) {
- if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {
- if (this.url.host !== "" && this.url.host !== null) {
- this.parseError = true;
- this.url.host = "";
- }
- this.buffer = this.buffer[0] + ":";
- }
- this.url.path.push(this.buffer);
- }
- this.buffer = "";
- if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) {
- while (this.url.path.length > 1 && this.url.path[0] === "") {
- this.parseError = true;
- this.url.path.shift();
- }
- }
- if (c === 63) {
- this.url.query = "";
- this.state = "query";
- }
- if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- }
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
-
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.buffer += percentEncodeChar(c, isPathPercentEncode);
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) {
- if (c === 63) {
- this.url.query = "";
- this.state = "query";
- } else if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- } else {
- // TODO: Add: not a URL code point
- if (!isNaN(c) && c !== 37) {
- this.parseError = true;
- }
-
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- if (!isNaN(c)) {
- this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);
- }
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) {
- if (isNaN(c) || (!this.stateOverride && c === 35)) {
- if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") {
- this.encodingOverride = "utf-8";
- }
-
- const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead
- for (let i = 0; i < buffer.length; ++i) {
- if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||
- buffer[i] === 0x3C || buffer[i] === 0x3E) {
- this.url.query += percentEncode(buffer[i]);
- } else {
- this.url.query += String.fromCodePoint(buffer[i]);
- }
- }
-
- this.buffer = "";
- if (c === 35) {
- this.url.fragment = "";
- this.state = "fragment";
- }
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.buffer += cStr;
- }
-
- return true;
-};
-
-URLStateMachine.prototype["parse fragment"] = function parseFragment(c) {
- if (isNaN(c)) { // do nothing
- } else if (c === 0x0) {
- this.parseError = true;
- } else {
- // TODO: If c is not a URL code point and not "%", parse error.
- if (c === 37 &&
- (!isASCIIHex(this.input[this.pointer + 1]) ||
- !isASCIIHex(this.input[this.pointer + 2]))) {
- this.parseError = true;
- }
-
- this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);
- }
-
- return true;
-};
-
-function serializeURL(url, excludeFragment) {
- let output = url.scheme + ":";
- if (url.host !== null) {
- output += "//";
-
- if (url.username !== "" || url.password !== "") {
- output += url.username;
- if (url.password !== "") {
- output += ":" + url.password;
- }
- output += "@";
- }
-
- output += serializeHost(url.host);
-
- if (url.port !== null) {
- output += ":" + url.port;
- }
- } else if (url.host === null && url.scheme === "file") {
- output += "//";
- }
-
- if (url.cannotBeABaseURL) {
- output += url.path[0];
- } else {
- for (const string of url.path) {
- output += "/" + string;
- }
- }
-
- if (url.query !== null) {
- output += "?" + url.query;
- }
-
- if (!excludeFragment && url.fragment !== null) {
- output += "#" + url.fragment;
- }
-
- return output;
-}
-
-function serializeOrigin(tuple) {
- let result = tuple.scheme + "://";
- result += serializeHost(tuple.host);
-
- if (tuple.port !== null) {
- result += ":" + tuple.port;
- }
-
- return result;
-}
-
-module.exports.serializeURL = serializeURL;
-
-module.exports.serializeURLOrigin = function (url) {
- // https://url.spec.whatwg.org/#concept-url-origin
- switch (url.scheme) {
- case "blob":
- try {
- return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));
- } catch (e) {
- // serializing an opaque origin returns "null"
- return "null";
- }
- case "ftp":
- case "gopher":
- case "http":
- case "https":
- case "ws":
- case "wss":
- return serializeOrigin({
- scheme: url.scheme,
- host: url.host,
- port: url.port
- });
- case "file":
- // spec says "exercise to the reader", chrome says "file://"
- return "file://";
- default:
- // serializing an opaque origin returns "null"
- return "null";
- }
-};
-
-module.exports.basicURLParse = function (input, options) {
- if (options === undefined) {
- options = {};
- }
-
- const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);
- if (usm.failure) {
- return "failure";
- }
-
- return usm.url;
-};
-
-module.exports.setTheUsername = function (url, username) {
- url.username = "";
- const decoded = punycode.ucs2.decode(username);
- for (let i = 0; i < decoded.length; ++i) {
- url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
- }
-};
-
-module.exports.setThePassword = function (url, password) {
- url.password = "";
- const decoded = punycode.ucs2.decode(password);
- for (let i = 0; i < decoded.length; ++i) {
- url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
- }
-};
-
-module.exports.serializeHost = serializeHost;
-
-module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;
-
-module.exports.serializeInteger = function (integer) {
- return String(integer);
-};
-
-module.exports.parseURL = function (input, options) {
- if (options === undefined) {
- options = {};
- }
-
- // We don't handle blobs, so this just delegates:
- return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });
-};
-
-
-/***/ }),
-
-/***/ 3185:
-/***/ ((module) => {
-
-"use strict";
-
-
-module.exports.mixin = function mixin(target, source) {
- const keys = Object.getOwnPropertyNames(source);
- for (let i = 0; i < keys.length; ++i) {
- Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
- }
-};
-
-module.exports.wrapperSymbol = Symbol("wrapper");
-module.exports.implSymbol = Symbol("impl");
-
-module.exports.wrapperForImpl = function (impl) {
- return impl[module.exports.wrapperSymbol];
-};
-
-module.exports.implForWrapper = function (wrapper) {
- return wrapper[module.exports.implSymbol];
-};
-
-
-
/***/ }),
/***/ 2940:
@@ -32797,7 +54242,7 @@ function wrappy (fn, cb) {
// Generated by CoffeeScript 1.12.7
(function() {
"use strict";
- var bom, defaults, events, isEmpty, processItem, processors, sax, setImmediate,
+ var bom, defaults, defineProperty, events, isEmpty, processItem, processors, sax, setImmediate,
bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
hasProp = {}.hasOwnProperty;
@@ -32827,6 +54272,16 @@ function wrappy (fn, cb) {
return item;
};
+ defineProperty = function(obj, key, value) {
+ var descriptor;
+ descriptor = Object.create(null);
+ descriptor.value = value;
+ descriptor.writable = true;
+ descriptor.enumerable = true;
+ descriptor.configurable = true;
+ return Object.defineProperty(obj, key, descriptor);
+ };
+
exports.Parser = (function(superClass) {
extend(Parser, superClass);
@@ -32890,13 +54345,13 @@ function wrappy (fn, cb) {
Parser.prototype.assignOrPush = function(obj, key, newValue) {
if (!(key in obj)) {
if (!this.options.explicitArray) {
- return obj[key] = newValue;
+ return defineProperty(obj, key, newValue);
} else {
- return obj[key] = [newValue];
+ return defineProperty(obj, key, [newValue]);
}
} else {
if (!(obj[key] instanceof Array)) {
- obj[key] = [obj[key]];
+ defineProperty(obj, key, [obj[key]]);
}
return obj[key].push(newValue);
}
@@ -32937,21 +54392,21 @@ function wrappy (fn, cb) {
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
- obj = Object.create(null);
+ obj = {};
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
- obj[attrkey] = Object.create(null);
+ obj[attrkey] = {};
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
if (_this.options.mergeAttrs) {
_this.assignOrPush(obj, processedKey, newValue);
} else {
- obj[attrkey][processedKey] = newValue;
+ defineProperty(obj[attrkey], processedKey, newValue);
}
}
}
@@ -33022,7 +54477,7 @@ function wrappy (fn, cb) {
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
- node = Object.create(null);
+ node = {};
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -33037,10 +54492,10 @@ function wrappy (fn, cb) {
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
- objClone = Object.create(null);
+ objClone = {};
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
- objClone[key] = obj[key];
+ defineProperty(objClone, key, obj[key]);
}
s[_this.options.childkey].push(objClone);
delete obj["#name"];
@@ -33054,8 +54509,8 @@ function wrappy (fn, cb) {
} else {
if (_this.options.explicitRoot) {
old = obj;
- obj = Object.create(null);
- obj[nodeName] = old;
+ obj = {};
+ defineProperty(obj, nodeName, old);
}
_this.resultObject = obj;
_this.saxParser.ended = true;
@@ -37564,14 +59019,6 @@ function wrappy (fn, cb) {
}).call(this);
-/***/ }),
-
-/***/ 2877:
-/***/ ((module) => {
-
-module.exports = eval("require")("encoding");
-
-
/***/ }),
/***/ 2941:
@@ -37590,6 +59037,14 @@ module.exports = require("assert");
/***/ }),
+/***/ 852:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("async_hooks");
+
+/***/ }),
+
/***/ 4300:
/***/ ((module) => {
@@ -37606,6 +59061,14 @@ module.exports = require("child_process");
/***/ }),
+/***/ 6206:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("console");
+
+/***/ }),
+
/***/ 6113:
/***/ ((module) => {
@@ -37614,6 +59077,14 @@ module.exports = require("crypto");
/***/ }),
+/***/ 7643:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("diagnostics_channel");
+
+/***/ }),
+
/***/ 9523:
/***/ ((module) => {
@@ -37670,6 +59141,30 @@ module.exports = require("net");
/***/ }),
+/***/ 5673:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("node:events");
+
+/***/ }),
+
+/***/ 4492:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("node:stream");
+
+/***/ }),
+
+/***/ 7261:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("node:util");
+
+/***/ }),
+
/***/ 2037:
/***/ ((module) => {
@@ -37686,11 +59181,19 @@ module.exports = require("path");
/***/ }),
-/***/ 5477:
+/***/ 4074:
/***/ ((module) => {
"use strict";
-module.exports = require("punycode");
+module.exports = require("perf_hooks");
+
+/***/ }),
+
+/***/ 3477:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("querystring");
/***/ }),
@@ -37702,6 +59205,14 @@ module.exports = require("stream");
/***/ }),
+/***/ 5356:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("stream/web");
+
+/***/ }),
+
/***/ 1576:
/***/ ((module) => {
@@ -37742,6 +59253,22 @@ module.exports = require("util");
/***/ }),
+/***/ 9830:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("util/types");
+
+/***/ }),
+
+/***/ 1267:
+/***/ ((module) => {
+
+"use strict";
+module.exports = require("worker_threads");
+
+/***/ }),
+
/***/ 9796:
/***/ ((module) => {
@@ -37750,11 +59277,1622 @@ module.exports = require("zlib");
/***/ }),
-/***/ 2020:
+/***/ 2960:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const WritableStream = (__nccwpck_require__(4492).Writable)
+const inherits = (__nccwpck_require__(7261).inherits)
+
+const StreamSearch = __nccwpck_require__(1142)
+
+const PartStream = __nccwpck_require__(1620)
+const HeaderParser = __nccwpck_require__(2032)
+
+const DASH = 45
+const B_ONEDASH = Buffer.from('-')
+const B_CRLF = Buffer.from('\r\n')
+const EMPTY_FN = function () {}
+
+function Dicer (cfg) {
+ if (!(this instanceof Dicer)) { return new Dicer(cfg) }
+ WritableStream.call(this, cfg)
+
+ if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
+
+ if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
+
+ this._headerFirst = cfg.headerFirst
+
+ this._dashes = 0
+ this._parts = 0
+ this._finished = false
+ this._realFinish = false
+ this._isPreamble = true
+ this._justMatched = false
+ this._firstWrite = true
+ this._inHeader = true
+ this._part = undefined
+ this._cb = undefined
+ this._ignoreData = false
+ this._partOpts = { highWaterMark: cfg.partHwm }
+ this._pause = false
+
+ const self = this
+ this._hparser = new HeaderParser(cfg)
+ this._hparser.on('header', function (header) {
+ self._inHeader = false
+ self._part.emit('header', header)
+ })
+}
+inherits(Dicer, WritableStream)
+
+Dicer.prototype.emit = function (ev) {
+ if (ev === 'finish' && !this._realFinish) {
+ if (!this._finished) {
+ const self = this
+ process.nextTick(function () {
+ self.emit('error', new Error('Unexpected end of multipart data'))
+ if (self._part && !self._ignoreData) {
+ const type = (self._isPreamble ? 'Preamble' : 'Part')
+ self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
+ self._part.push(null)
+ process.nextTick(function () {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ return
+ }
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ }
+ } else { WritableStream.prototype.emit.apply(this, arguments) }
+}
+
+Dicer.prototype._write = function (data, encoding, cb) {
+ // ignore unexpected data (e.g. extra trailer data after finished)
+ if (!this._hparser && !this._bparser) { return cb() }
+
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
+ }
+ const r = this._hparser.push(data)
+ if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
+ }
+
+ // allows for "easier" testing
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF)
+ this._firstWrite = false
+ }
+
+ this._bparser.push(data)
+
+ if (this._pause) { this._cb = cb } else { cb() }
+}
+
+Dicer.prototype.reset = function () {
+ this._part = undefined
+ this._bparser = undefined
+ this._hparser = undefined
+}
+
+Dicer.prototype.setBoundary = function (boundary) {
+ const self = this
+ this._bparser = new StreamSearch('\r\n--' + boundary)
+ this._bparser.on('info', function (isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end)
+ })
+}
+
+Dicer.prototype._ignore = function () {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true
+ this._part.on('error', EMPTY_FN)
+ // we must perform some kind of read on the stream even though we are
+ // ignoring the data, otherwise node's Readable stream will not emit 'end'
+ // after pushing null to the stream
+ this._part.resume()
+ }
+}
+
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+ let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
+
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && (start + i) < end) {
+ if (data[start + i] === DASH) {
+ ++i
+ ++this._dashes
+ } else {
+ if (this._dashes) { buf = B_ONEDASH }
+ this._dashes = 0
+ break
+ }
+ }
+ if (this._dashes === 2) {
+ if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
+ this.reset()
+ this._finished = true
+ // no more parts will be added
+ if (self._parts === 0) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ }
+ }
+ if (this._dashes) { return }
+ }
+ if (this._justMatched) { this._justMatched = false }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ this._part._read = function (n) {
+ self._unpause()
+ }
+ if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
+ if (!this._isPreamble) { this._inHeader = true }
+ }
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) { shouldWriteMore = this._part.push(buf) }
+ shouldWriteMore = this._part.push(data.slice(start, end))
+ if (!shouldWriteMore) { this._pause = true }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) { this._hparser.push(buf) }
+ r = this._hparser.push(data.slice(start, end))
+ if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset()
+ if (this._isPreamble) { this._isPreamble = false } else {
+ if (start !== end) {
+ ++this._parts
+ this._part.on('end', function () {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ } else {
+ self._unpause()
+ }
+ }
+ })
+ }
+ }
+ this._part.push(null)
+ this._part = undefined
+ this._ignoreData = false
+ this._justMatched = true
+ this._dashes = 0
+ }
+}
+
+Dicer.prototype._unpause = function () {
+ if (!this._pause) { return }
+
+ this._pause = false
+ if (this._cb) {
+ const cb = this._cb
+ this._cb = undefined
+ cb()
+ }
+}
+
+module.exports = Dicer
+
+
+/***/ }),
+
+/***/ 2032:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const EventEmitter = (__nccwpck_require__(5673).EventEmitter)
+const inherits = (__nccwpck_require__(7261).inherits)
+const getLimit = __nccwpck_require__(1467)
+
+const StreamSearch = __nccwpck_require__(1142)
+
+const B_DCRLF = Buffer.from('\r\n\r\n')
+const RE_CRLF = /\r\n/g
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
+
+function HeaderParser (cfg) {
+ EventEmitter.call(this)
+
+ cfg = cfg || {}
+ const self = this
+ this.nread = 0
+ this.maxed = false
+ this.npairs = 0
+ this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
+ this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
+ this.buffer = ''
+ this.header = {}
+ this.finished = false
+ this.ss = new StreamSearch(B_DCRLF)
+ this.ss.on('info', function (isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start
+ self.nread = self.maxHeaderSize
+ self.maxed = true
+ } else { self.nread += (end - start) }
+
+ self.buffer += data.toString('binary', start, end)
+ }
+ if (isMatch) { self._finish() }
+ })
+}
+inherits(HeaderParser, EventEmitter)
+
+HeaderParser.prototype.push = function (data) {
+ const r = this.ss.push(data)
+ if (this.finished) { return r }
+}
+
+HeaderParser.prototype.reset = function () {
+ this.finished = false
+ this.buffer = ''
+ this.header = {}
+ this.ss.reset()
+}
+
+HeaderParser.prototype._finish = function () {
+ if (this.buffer) { this._parseHeader() }
+ this.ss.matches = this.ss.maxMatches
+ const header = this.header
+ this.header = {}
+ this.buffer = ''
+ this.finished = true
+ this.nread = this.npairs = 0
+ this.maxed = false
+ this.emit('header', header)
+}
+
+HeaderParser.prototype._parseHeader = function () {
+ if (this.npairs === this.maxHeaderPairs) { return }
+
+ const lines = this.buffer.split(RE_CRLF)
+ const len = lines.length
+ let m, h
+
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (lines[i].length === 0) { continue }
+ if (lines[i][0] === '\t' || lines[i][0] === ' ') {
+ // folded header content
+ // RFC2822 says to just remove the CRLF and not the whitespace following
+ // it, so we follow the RFC and include the leading whitespace ...
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i]
+ continue
+ }
+ }
+
+ const posColon = lines[i].indexOf(':')
+ if (
+ posColon === -1 ||
+ posColon === 0
+ ) {
+ return
+ }
+ m = RE_HDR.exec(lines[i])
+ h = m[1].toLowerCase()
+ this.header[h] = this.header[h] || []
+ this.header[h].push((m[2] || ''))
+ if (++this.npairs === this.maxHeaderPairs) { break }
+ }
+}
+
+module.exports = HeaderParser
+
+
+/***/ }),
+
+/***/ 1620:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const inherits = (__nccwpck_require__(7261).inherits)
+const ReadableStream = (__nccwpck_require__(4492).Readable)
+
+function PartStream (opts) {
+ ReadableStream.call(this, opts)
+}
+inherits(PartStream, ReadableStream)
+
+PartStream.prototype._read = function (n) {}
+
+module.exports = PartStream
+
+
+/***/ }),
+
+/***/ 1142:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+const EventEmitter = (__nccwpck_require__(5673).EventEmitter)
+const inherits = (__nccwpck_require__(7261).inherits)
+
+function SBMH (needle) {
+ if (typeof needle === 'string') {
+ needle = Buffer.from(needle)
+ }
+
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError('The needle has to be a String or a Buffer.')
+ }
+
+ const needleLength = needle.length
+
+ if (needleLength === 0) {
+ throw new Error('The needle cannot be an empty String/Buffer.')
+ }
+
+ if (needleLength > 256) {
+ throw new Error('The needle cannot have a length bigger than 256.')
+ }
+
+ this.maxMatches = Infinity
+ this.matches = 0
+
+ this._occ = new Array(256)
+ .fill(needleLength) // Initialize occurrence table.
+ this._lookbehind_size = 0
+ this._needle = needle
+ this._bufpos = 0
+
+ this._lookbehind = Buffer.alloc(needleLength)
+
+ // Populate occurrence table with analysis of the needle,
+ // ignoring last letter.
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
+ this._occ[needle[i]] = needleLength - 1 - i
+ }
+}
+inherits(SBMH, EventEmitter)
+
+SBMH.prototype.reset = function () {
+ this._lookbehind_size = 0
+ this.matches = 0
+ this._bufpos = 0
+}
+
+SBMH.prototype.push = function (chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, 'binary')
+ }
+ const chlen = chunk.length
+ this._bufpos = pos || 0
+ let r
+ while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
+ return r
+}
+
+SBMH.prototype._sbmh_feed = function (data) {
+ const len = data.length
+ const needle = this._needle
+ const needleLength = needle.length
+ const lastNeedleChar = needle[needleLength - 1]
+
+ // Positive: points to a position in `data`
+ // pos == 3 points to data[3]
+ // Negative: points to a position in the lookbehind buffer
+ // pos == -2 points to lookbehind[lookbehind_size - 2]
+ let pos = -this._lookbehind_size
+ let ch
+
+ if (pos < 0) {
+ // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+ // search with character lookup code that considers both the
+ // lookbehind buffer and the current round's haystack data.
+ //
+ // Loop until
+ // there is a match.
+ // or until
+ // we've moved past the position that requires the
+ // lookbehind buffer. In this case we switch to the
+ // optimized loop.
+ // or until
+ // the character to look at lies outside the haystack.
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
+
+ if (
+ ch === lastNeedleChar &&
+ this._sbmh_memcmp(data, pos, needleLength - 1)
+ ) {
+ this._lookbehind_size = 0
+ ++this.matches
+ this.emit('info', true)
+
+ return (this._bufpos = pos + needleLength)
+ }
+ pos += this._occ[ch]
+ }
+
+ // No match.
+
+ if (pos < 0) {
+      // There's too little data for Boyer-Moore-Horspool to run,
+ // so let's use a different algorithm to skip as much as
+ // we can.
+ // Forward pos until
+ // the trailing part of lookbehind + data
+ // looks like the beginning of the needle
+ // or until
+ // pos == 0
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
+ }
+
+ if (pos >= 0) {
+ // Discard lookbehind buffer.
+ this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
+ this._lookbehind_size = 0
+ } else {
+ // Cut off part of the lookbehind buffer that has
+ // been processed and append the entire haystack
+ // into it.
+ const bytesToCutOff = this._lookbehind_size + pos
+ if (bytesToCutOff > 0) {
+ // The cut off data is guaranteed not to contain the needle.
+ this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
+ }
+
+ this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
+ this._lookbehind_size - bytesToCutOff)
+ this._lookbehind_size -= bytesToCutOff
+
+ data.copy(this._lookbehind, this._lookbehind_size)
+ this._lookbehind_size += len
+
+ this._bufpos = len
+ return len
+ }
+ }
+
+ pos += (pos >= 0) * this._bufpos
+
+ // Lookbehind buffer is now empty. We only need to check if the
+ // needle is in the haystack.
+ if (data.indexOf(needle, pos) !== -1) {
+ pos = data.indexOf(needle, pos)
+ ++this.matches
+ if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
+
+ return (this._bufpos = pos + needleLength)
+ } else {
+ pos = len - needleLength
+ }
+
+ // There was no match. If there's trailing haystack data that we cannot
+ // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+ // data is less than the needle size) then match using a modified
+ // algorithm that starts matching from the beginning instead of the end.
+ // Whatever trailing data is left after running this algorithm is added to
+ // the lookbehind buffer.
+ while (
+ pos < len &&
+ (
+ data[pos] !== needle[0] ||
+ (
+ (Buffer.compare(
+ data.subarray(pos, pos + len - pos),
+ needle.subarray(0, len - pos)
+ ) !== 0)
+ )
+ )
+ ) {
+ ++pos
+ }
+ if (pos < len) {
+ data.copy(this._lookbehind, 0, pos, pos + (len - pos))
+ this._lookbehind_size = len - pos
+ }
+
+ // Everything until pos is guaranteed not to contain needle data.
+ if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
+
+ this._bufpos = len
+ return len
+}
+
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+ return (pos < 0)
+ ? this._lookbehind[this._lookbehind_size + pos]
+ : data[pos]
+}
+
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
+ }
+ return true
+}
+
+module.exports = SBMH
+
+
+/***/ }),
+
+/***/ 727:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const WritableStream = (__nccwpck_require__(4492).Writable)
+const { inherits } = __nccwpck_require__(7261)
+const Dicer = __nccwpck_require__(2960)
+
+const MultipartParser = __nccwpck_require__(2183)
+const UrlencodedParser = __nccwpck_require__(8306)
+const parseParams = __nccwpck_require__(1854)
+
+function Busboy (opts) {
+ if (!(this instanceof Busboy)) { return new Busboy(opts) }
+
+ if (typeof opts !== 'object') {
+ throw new TypeError('Busboy expected an options-Object.')
+ }
+ if (typeof opts.headers !== 'object') {
+ throw new TypeError('Busboy expected an options-Object with headers-attribute.')
+ }
+ if (typeof opts.headers['content-type'] !== 'string') {
+ throw new TypeError('Missing Content-Type-header.')
+ }
+
+ const {
+ headers,
+ ...streamOptions
+ } = opts
+
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions
+ }
+ WritableStream.call(this, this.opts)
+
+ this._done = false
+ this._parser = this.getParserByHeaders(headers)
+ this._finished = false
+}
+inherits(Busboy, WritableStream)
+
+Busboy.prototype.emit = function (ev) {
+ if (ev === 'finish') {
+ if (!this._done) {
+ this._parser?.end()
+ return
+ } else if (this._finished) {
+ return
+ }
+ this._finished = true
+ }
+ WritableStream.prototype.emit.apply(this, arguments)
+}
+
+Busboy.prototype.getParserByHeaders = function (headers) {
+ const parsed = parseParams(headers['content-type'])
+
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath
+ }
+
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg)
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg)
+ }
+ throw new Error('Unsupported Content-Type.')
+}
+
+Busboy.prototype._write = function (chunk, encoding, cb) {
+ this._parser.write(chunk, cb)
+}
+
+module.exports = Busboy
+module.exports["default"] = Busboy
+module.exports.Busboy = Busboy
+
+module.exports.Dicer = Dicer
+
+
+/***/ }),
+
+/***/ 2183:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+// TODO:
+// * support 1 nested multipart level
+// (see second multipart example here:
+// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+// * support limits.fieldNameSize
+// -- this will require modifications to utils.parseParams
+
+const { Readable } = __nccwpck_require__(4492)
+const { inherits } = __nccwpck_require__(7261)
+
+const Dicer = __nccwpck_require__(2960)
+
+const parseParams = __nccwpck_require__(1854)
+const decodeText = __nccwpck_require__(4619)
+const basename = __nccwpck_require__(8647)
+const getLimit = __nccwpck_require__(1467)
+
+const RE_BOUNDARY = /^boundary$/i
+const RE_FIELD = /^form-data$/i
+const RE_CHARSET = /^charset$/i
+const RE_FILENAME = /^filename$/i
+const RE_NAME = /^name$/i
+
+Multipart.detect = /^multipart\/form-data/i
+function Multipart (boy, cfg) {
+ let i
+ let len
+ const self = this
+ let boundary
+ const limits = cfg.limits
+ const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
+ const parsedConType = cfg.parsedConType || []
+ const defCharset = cfg.defCharset || 'utf8'
+ const preservePath = cfg.preservePath
+ const fileOpts = { highWaterMark: cfg.fileHwm }
+
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) &&
+ RE_BOUNDARY.test(parsedConType[i][0])) {
+ boundary = parsedConType[i][1]
+ break
+ }
+ }
+
+ function checkFinished () {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false
+ self.end()
+ }
+ }
+
+ if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
+
+ const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
+ const filesLimit = getLimit(limits, 'files', Infinity)
+ const fieldsLimit = getLimit(limits, 'fields', Infinity)
+ const partsLimit = getLimit(limits, 'parts', Infinity)
+ const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
+ const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
+
+ let nfiles = 0
+ let nfields = 0
+ let nends = 0
+ let curFile
+ let curField
+ let finished = false
+
+ this._needDrain = false
+ this._pause = false
+ this._cb = undefined
+ this._nparts = 0
+ this._boy = boy
+
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark
+ }
+
+ this.parser = new Dicer(parserCfg)
+ this.parser.on('drain', function () {
+ self._needDrain = false
+ if (self._cb && !self._pause) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }).on('part', function onPart (part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener('part', onPart)
+ self.parser.on('part', skipPart)
+ boy.hitPartsLimit = true
+ boy.emit('partsLimit')
+ return skipPart(part)
+ }
+
+    // hack: streams2 never emits 'end' until nextTick, so emit 'end' early
+    // since we know the part has ended if we are already seeing the next part
+ if (curField) {
+ const field = curField
+ field.emit('end')
+ field.removeAllListeners('end')
+ }
+
+ part.on('header', function (header) {
+ let contype
+ let fieldname
+ let parsed
+ let charset
+ let encoding
+ let filename
+ let nsize = 0
+
+ if (header['content-type']) {
+ parsed = parseParams(header['content-type'][0])
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase()
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase()
+ break
+ }
+ }
+ }
+ }
+
+ if (contype === undefined) { contype = 'text/plain' }
+ if (charset === undefined) { charset = defCharset }
+
+ if (header['content-disposition']) {
+ parsed = parseParams(header['content-disposition'][0])
+ if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1]
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1]
+ if (!preservePath) { filename = basename(filename) }
+ }
+ }
+ } else { return skipPart(part) }
+
+ if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
+
+ let onData,
+ onEnd
+
+ if (isPartAFile(fieldname, contype, filename)) {
+ // file/binary field
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true
+ boy.emit('filesLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfiles
+
+ if (!boy._events.file) {
+ self.parser._ignore()
+ return
+ }
+
+ ++nends
+ const file = new FileStream(fileOpts)
+ curFile = file
+ file.on('end', function () {
+ --nends
+ self._pause = false
+ checkFinished()
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ })
+ file._read = function (n) {
+ if (!self._pause) { return }
+ self._pause = false
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }
+ boy.emit('file', fieldname, file, filename, encoding, contype)
+
+ onData = function (data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length
+ if (extralen > 0) { file.push(data.slice(0, extralen)) }
+ file.truncated = true
+ file.bytesRead = fileSizeLimit
+ part.removeAllListeners('data')
+ file.emit('limit')
+ return
+ } else if (!file.push(data)) { self._pause = true }
+
+ file.bytesRead = nsize
+ }
+
+ onEnd = function () {
+ curFile = undefined
+ file.push(null)
+ }
+ } else {
+ // non-file field
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true
+ boy.emit('fieldsLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfields
+ ++nends
+ let buffer = ''
+ let truncated = false
+ curField = part
+
+ onData = function (data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = (fieldSizeLimit - (nsize - data.length))
+ buffer += data.toString('binary', 0, extralen)
+ truncated = true
+ part.removeAllListeners('data')
+ } else { buffer += data.toString('binary') }
+ }
+
+ onEnd = function () {
+ curField = undefined
+ if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
+ boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
+ --nends
+ checkFinished()
+ }
+ }
+
+ /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+ broken. Streams2/streams3 is a huge black box of confusion, but
+ somehow overriding the sync state seems to fix things again (and still
+ seems to work for previous node versions).
+ */
+ part._readableState.sync = false
+
+ part.on('data', onData)
+ part.on('end', onEnd)
+ }).on('error', function (err) {
+ if (curFile) { curFile.emit('error', err) }
+ })
+ }).on('error', function (err) {
+ boy.emit('error', err)
+ }).on('finish', function () {
+ finished = true
+ checkFinished()
+ })
+}
+
+Multipart.prototype.write = function (chunk, cb) {
+ const r = this.parser.write(chunk)
+ if (r && !this._pause) {
+ cb()
+ } else {
+ this._needDrain = !r
+ this._cb = cb
+ }
+}
+
+Multipart.prototype.end = function () {
+ const self = this
+
+ if (self.parser.writable) {
+ self.parser.end()
+ } else if (!self._boy._done) {
+ process.nextTick(function () {
+ self._boy._done = true
+ self._boy.emit('finish')
+ })
+ }
+}
+
+function skipPart (part) {
+ part.resume()
+}
+
+function FileStream (opts) {
+ Readable.call(this, opts)
+
+ this.bytesRead = 0
+
+ this.truncated = false
+}
+
+inherits(FileStream, Readable)
+
+FileStream.prototype._read = function (n) {}
+
+module.exports = Multipart
+
+
+/***/ }),
+
+/***/ 8306:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const Decoder = __nccwpck_require__(7100)
+const decodeText = __nccwpck_require__(4619)
+const getLimit = __nccwpck_require__(1467)
+
+const RE_CHARSET = /^charset$/i
+
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
+function UrlEncoded (boy, cfg) {
+ const limits = cfg.limits
+ const parsedConType = cfg.parsedConType
+ this.boy = boy
+
+ this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
+ this.fieldsLimit = getLimit(limits, 'fields', Infinity)
+
+ let charset
+ for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
+ if (Array.isArray(parsedConType[i]) &&
+ RE_CHARSET.test(parsedConType[i][0])) {
+ charset = parsedConType[i][1].toLowerCase()
+ break
+ }
+ }
+
+ if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
+
+ this.decoder = new Decoder()
+ this.charset = charset
+ this._fields = 0
+ this._state = 'key'
+ this._checkingBytes = true
+ this._bytesKey = 0
+ this._bytesVal = 0
+ this._key = ''
+ this._val = ''
+ this._keyTrunc = false
+ this._valTrunc = false
+ this._hitLimit = false
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true
+ this.boy.emit('fieldsLimit')
+ }
+ return cb()
+ }
+
+ let idxeq; let idxamp; let i; let p = 0; const len = data.length
+
+ while (p < len) {
+ if (this._state === 'key') {
+ idxeq = idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x3D/* = */) {
+ idxeq = i
+ break
+ } else if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesKey }
+ }
+
+ if (idxeq !== undefined) {
+ // key with assignment
+ if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
+ this._state = 'val'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._val = ''
+ this._bytesVal = 0
+ this._valTrunc = false
+ this.decoder.reset()
+
+ p = idxeq + 1
+ } else if (idxamp !== undefined) {
+ // key with no assignment
+ ++this._fields
+ let key; const keyTrunc = this._keyTrunc
+ if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ if (key.length) {
+ this.boy.emit('field', decodeText(key, 'binary', this.charset),
+ '',
+ keyTrunc,
+ false)
+ }
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._keyTrunc = true
+ }
+ } else {
+ if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ } else {
+ idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesVal }
+ }
+
+ if (idxamp !== undefined) {
+ ++this._fields
+ if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ this._state = 'key'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._val === '' && this.fieldSizeLimit === 0) ||
+ (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._valTrunc = true
+ }
+ } else {
+ if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ }
+ }
+ cb()
+}
+
+UrlEncoded.prototype.end = function () {
+ if (this.boy._done) { return }
+
+ if (this._state === 'key' && this._key.length > 0) {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ '',
+ this._keyTrunc,
+ false)
+ } else if (this._state === 'val') {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ }
+ this.boy._done = true
+ this.boy.emit('finish')
+}
+
+module.exports = UrlEncoded
+
+
+/***/ }),
+
+/***/ 7100:
/***/ ((module) => {
"use strict";
-module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[29
5,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"vali
d"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV
8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"map
ped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],
[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,
1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207]
,"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"v
alid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],
"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3
954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5
018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"
],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692
],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],
[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995
,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131
],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[
8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"
NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[4
9,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,94
37],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243]
,"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[1142
9,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,1
2035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],
[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",
[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped"
,[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[
40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[1294
2,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[
13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[1312
5,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"
mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"di
sallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[427
92,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"
mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]]
,[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]
],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped
",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015
],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64
131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]
],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[645
19,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[646
12,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,
64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,160
9]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],
"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped"
,[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped
",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]]
,[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489]
,"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590
],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",
[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458
,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[
93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"ma
pped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped"
,[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077]
,"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]]
,[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[
120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120
404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[12051
3,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[
963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,12073
2],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallo
wed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[12
6577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"]
,[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"m
apped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",
[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602
,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped
",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[2772
6]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686
]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]
],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"
],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]');
+
+
+const RE_PLUS = /\+/g
+
+const HEX = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+]
+
+function Decoder () {
+ this.buffer = undefined
+}
+Decoder.prototype.write = function (str) {
+ // Replace '+' with ' ' before decoding
+ str = str.replace(RE_PLUS, ' ')
+ let res = ''
+ let i = 0; let p = 0; const len = str.length
+ for (; i < len; ++i) {
+ if (this.buffer !== undefined) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += '%' + this.buffer
+ this.buffer = undefined
+ --i // retry character
+ } else {
+ this.buffer += str[i]
+ ++p
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16))
+ this.buffer = undefined
+ }
+ }
+ } else if (str[i] === '%') {
+ if (i > p) {
+ res += str.substring(p, i)
+ p = i
+ }
+ this.buffer = ''
+ ++p
+ }
+ }
+ if (p < len && this.buffer === undefined) { res += str.substring(p) }
+ return res
+}
+Decoder.prototype.reset = function () {
+ this.buffer = undefined
+}
+
+module.exports = Decoder
+
+
+/***/ }),
+
+/***/ 8647:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = function basename (path) {
+ if (typeof path !== 'string') { return '' }
+ for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+ switch (path.charCodeAt(i)) {
+ case 0x2F: // '/'
+ case 0x5C: // '\'
+ path = path.slice(i + 1)
+ return (path === '..' || path === '.' ? '' : path)
+ }
+ }
+ return (path === '..' || path === '.' ? '' : path)
+}
+
+
+/***/ }),
+
+/***/ 4619:
+/***/ (function(module) {
+
+"use strict";
+
+
+// Node has always supported utf-8
+const utf8Decoder = new TextDecoder('utf-8')
+const textDecoders = new Map([
+ ['utf-8', utf8Decoder],
+ ['utf8', utf8Decoder]
+])
+
+function getDecoder (charset) {
+ let lc
+ while (true) {
+ switch (charset) {
+ case 'utf-8':
+ case 'utf8':
+ return decoders.utf8
+ case 'latin1':
+ case 'ascii': // TODO: Make these a separate, strict decoder?
+ case 'us-ascii':
+ case 'iso-8859-1':
+ case 'iso8859-1':
+ case 'iso88591':
+ case 'iso_8859-1':
+ case 'windows-1252':
+ case 'iso_8859-1:1987':
+ case 'cp1252':
+ case 'x-cp1252':
+ return decoders.latin1
+ case 'utf16le':
+ case 'utf-16le':
+ case 'ucs2':
+ case 'ucs-2':
+ return decoders.utf16le
+ case 'base64':
+ return decoders.base64
+ default:
+ if (lc === undefined) {
+ lc = true
+ charset = charset.toLowerCase()
+ continue
+ }
+ return decoders.other.bind(charset)
+ }
+ }
+}
+
+const decoders = {
+ utf8: (data, sourceEncoding) => {
+ if (data.length === 0) {
+ return ''
+ }
+ if (typeof data === 'string') {
+ data = Buffer.from(data, sourceEncoding)
+ }
+ return data.utf8Slice(0, data.length)
+ },
+
+ latin1: (data, sourceEncoding) => {
+ if (data.length === 0) {
+ return ''
+ }
+ if (typeof data === 'string') {
+ return data
+ }
+ return data.latin1Slice(0, data.length)
+ },
+
+ utf16le: (data, sourceEncoding) => {
+ if (data.length === 0) {
+ return ''
+ }
+ if (typeof data === 'string') {
+ data = Buffer.from(data, sourceEncoding)
+ }
+ return data.ucs2Slice(0, data.length)
+ },
+
+ base64: (data, sourceEncoding) => {
+ if (data.length === 0) {
+ return ''
+ }
+ if (typeof data === 'string') {
+ data = Buffer.from(data, sourceEncoding)
+ }
+ return data.base64Slice(0, data.length)
+ },
+
+ other: (data, sourceEncoding) => {
+ if (data.length === 0) {
+ return ''
+ }
+ if (typeof data === 'string') {
+ data = Buffer.from(data, sourceEncoding)
+ }
+
+ if (textDecoders.has(this.toString())) {
+ try {
+ return textDecoders.get(this).decode(data)
+ } catch (e) { }
+ }
+ return typeof data === 'string'
+ ? data
+ : data.toString()
+ }
+}
+
+function decodeText (text, sourceEncoding, destEncoding) {
+ if (text) {
+ return getDecoder(destEncoding)(text, sourceEncoding)
+ }
+ return text
+}
+
+module.exports = decodeText
+
+
+/***/ }),
+
+/***/ 1467:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = function getLimit (limits, name, defaultLimit) {
+ if (
+ !limits ||
+ limits[name] === undefined ||
+ limits[name] === null
+ ) { return defaultLimit }
+
+ if (
+ typeof limits[name] !== 'number' ||
+ isNaN(limits[name])
+ ) { throw new TypeError('Limit ' + name + ' is not a valid number') }
+
+ return limits[name]
+}
+
+
+/***/ }),
+
+/***/ 1854:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/* eslint-disable object-property-newline */
+
+
+const decodeText = __nccwpck_require__(4619)
+
+const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g
+
+const EncodedLookup = {
+ '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04',
+ '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09',
+ '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c',
+ '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e',
+ '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12',
+ '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17',
+ '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b',
+ '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d',
+ '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20',
+ '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25',
+ '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a',
+ '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c',
+ '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f',
+ '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33',
+ '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38',
+ '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b',
+ '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e',
+ '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41',
+ '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46',
+ '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a',
+ '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d',
+ '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f',
+ '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54',
+ '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59',
+ '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c',
+ '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e',
+ '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62',
+ '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67',
+ '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b',
+ '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d',
+ '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70',
+ '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75',
+ '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a',
+ '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c',
+ '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f',
+ '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83',
+ '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88',
+ '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b',
+ '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e',
+ '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91',
+ '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96',
+ '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a',
+ '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d',
+ '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f',
+ '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2',
+ '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4',
+ '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7',
+ '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9',
+ '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab',
+ '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac',
+ '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad',
+ '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae',
+ '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0',
+ '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2',
+ '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5',
+ '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7',
+ '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba',
+ '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb',
+ '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc',
+ '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd',
+ '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf',
+ '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0',
+ '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3',
+ '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5',
+ '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8',
+ '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca',
+ '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb',
+ '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc',
+ '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce',
+ '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf',
+ '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1',
+ '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3',
+ '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6',
+ '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8',
+ '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda',
+ '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb',
+ '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd',
+ '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde',
+ '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf',
+ '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1',
+ '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4',
+ '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6',
+ '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9',
+ '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea',
+ '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec',
+ '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed',
+ '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee',
+ '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef',
+ '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2',
+ '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4',
+ '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7',
+ '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9',
+ '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb',
+ '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc',
+ '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd',
+ '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe',
+ '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff'
+}
+
+function encodedReplacer (match) {
+ return EncodedLookup[match]
+}
+
+const STATE_KEY = 0
+const STATE_VALUE = 1
+const STATE_CHARSET = 2
+const STATE_LANG = 3
+
+function parseParams (str) {
+ const res = []
+ let state = STATE_KEY
+ let charset = ''
+ let inquote = false
+ let escaping = false
+ let p = 0
+ let tmp = ''
+ const len = str.length
+
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ const char = str[i]
+ if (char === '\\' && inquote) {
+ if (escaping) { escaping = false } else {
+ escaping = true
+ continue
+ }
+ } else if (char === '"') {
+ if (!escaping) {
+ if (inquote) {
+ inquote = false
+ state = STATE_KEY
+ } else { inquote = true }
+ continue
+ } else { escaping = false }
+ } else {
+ if (escaping && inquote) { tmp += '\\' }
+ escaping = false
+ if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
+ if (state === STATE_CHARSET) {
+ state = STATE_LANG
+ charset = tmp.substring(1)
+ } else { state = STATE_VALUE }
+ tmp = ''
+ continue
+ } else if (state === STATE_KEY &&
+ (char === '*' || char === '=') &&
+ res.length) {
+ state = char === '*'
+ ? STATE_CHARSET
+ : STATE_VALUE
+ res[p] = [tmp, undefined]
+ tmp = ''
+ continue
+ } else if (!inquote && char === ';') {
+ state = STATE_KEY
+ if (charset) {
+ if (tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ }
+ charset = ''
+ } else if (tmp.length) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+ if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
+ tmp = ''
+ ++p
+ continue
+ } else if (!inquote && (char === ' ' || char === '\t')) { continue }
+ }
+ tmp += char
+ }
+ if (charset && tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ } else if (tmp) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+
+ if (res[p] === undefined) {
+ if (tmp) { res[p] = tmp }
+ } else { res[p][1] = tmp }
+
+ return res
+}
+
+module.exports = parseParams
+
/***/ })
diff --git a/dist/index.js.map b/dist/index.js.map
new file mode 100644
index 0000000..4568f81
--- /dev/null
+++ b/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sources":["../webpack://test-check/./lib/input-providers/artifact-provider.js","../webpack://test-check/./lib/input-providers/local-file-provider.js","../webpack://test-check/./lib/main.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-parser.js","../webpack://test-check/./lib/parsers/dart-json/dart-json-types.js","../webpack://test-check/./lib/parsers/dotnet-trx/dotnet-trx-parser.js","../webpack://test-check/./lib/parsers/java-junit/java-junit-parser.js","../webpack://test-check/./lib/parsers/jest-junit/jest-junit-parser.js","../webpack://test-check/./lib/parsers/mocha-json/mocha-json-parser.js","../webpack://test-check/./lib/report/get-annotations.js","../webpack://test-check/./lib/report/get-report.js","../webpack://test-check/./lib/test-results.js","../webpack://test-check/./lib/utils/exec.js","../webpack://test-check/./lib/utils/git.js","../webpack://test-check/./lib/utils/github-utils.js","../webpack://test-check/./lib/utils/markdown-utils.js","../webpack://test-check/./lib/utils/node-utils.js","../webpack://test-check/./lib/utils/parse-utils.js","../webpack://test-check/./lib/utils/path-utils.js","../webpack://test-check/./lib/utils/slugger.js","../webpack://test-check/./node_modules/@actions/core/lib/command.js","../webpack://test-check/./node_modules/@actions/core/lib/core.js","../webpack://test-check/./node_modules/@actions/core/lib/file-command.js","../webpack://test-check/./node_modules/@actions/core/lib/utils.js","../webpack://test-check/./node_modules/@actions/exec/lib/exec.js","../webpack://test-check/./node_modules/@actions/exec/lib/toolrunner.js","../webpack://test-check/./node_modules/@actions/github/lib/context.js","../webpack://test-check/./node_modules/@actions/github/lib/github.js","../webpack://test-check/./node_modules/@actions/github/lib/internal/utils.js","../webpack://test-check/./node_modules/@actions/github/lib/utils.js","../webpack://test-check/./node_modules/@actions/http-client/index.js","../webpack://test-check/./node_modules/@actions/http-client/proxy.js","../webpack://test-check/./node_modules/@actions/io/lib/io-util.js","../webpack://test-check/./node_modules/@actions/io/lib/io.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/constants.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.scandir/out/utils/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/adapters/fs.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.stat/out/settings.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/index.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/providers/stream.js","../webpack://test-
check/./node_modules/@nodelib/fs.walk/out/providers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/async.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/common.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/reader.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/readers/sync.js","../webpack://test-check/./node_modules/@nodelib/fs.walk/out/settings.js","../webpack://test-check/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/core/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/endpoint/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/dist-node/index.js","../webpack://test-check/./node_modules/@octokit/request/node_modules/is-plain-object/dist/is-plain-object.js","../webpack://test-check/./node_modules/@sindresorhus/is/dist/index.js","../webpack://test-check/./node_modules/@szmarczak/http-timer/dist/source/index.js","../webpack://test-check/./node_modules/adm-zip/adm-zip.js","../webpack://test-check/./node_modules/adm-zip/headers/entryHeader.js","../webpack://test-check/./node_modules/adm-zip/headers/index.js","../webpack://test-check/./node_modules/adm-zip/headers/mainHeader.js","../webpack://test-check/./node_modules/adm-zip/methods/deflater.js","../webpack://test-check/./node_modules/adm-zip/methods/index.js","../webpack://test-check/./node_modules/adm-zip/methods/inflater.js","../webpack://test-check/./node_modules/adm-zip/methods/zipcrypto.js","../webpack://test-check/./node_modules/adm-zip/util/constants.js","../webpack://test-check/./node_modules/adm-zip/util/errors.js","../webpack://test-check/./node_modules/adm-zip/util/fattr.js","../webpack://test-check/./node_modules/adm-zip/util/fileSystem.js","../webpack://test-check/./node_modules/adm-zip/util/index.js","../webpack://test-check/./node_modules/adm-zip/util/utils.js","../webpack://test-check/./node_modules/adm-zip/zipEntry.js","../webpack://test-check/./node_modules/adm-zip/zipFile.js","../webpack://test-check/./node_modules/before-after-hook/index.js","../webpack://test-check/./node_modules/before-after-hook/lib/add.js","../webpack://test-check/./node_modules/before-after-hook/lib/register.js","../webpack://test-check/./node_modules/before-after-hook/lib/remove.js","../webpack://test-check/./node_modules/cacheable-lookup/source/index.js","../webpack://test-check/./node_modules/cacheable-request/node_modules/get-stream/buffer-stream.js","../webpack://test-check/./node_modules/cacheable-request/node_modules/get-stream/index.js","../webpack://test-check/./node_modules/cacheable-request/src/index.js","../webpack://test-check/./node_modules/clone-response/src/index.js","../webpack://test-check/./node_modules/decompress-response/index.js","../webpack://test-check/./node_modules/decompress-response/node_modules/mimic-response/index.js","../webpack://test-check/./node_modules/defer-to-connect/dist/source/index.js","../webpack://test-check/./node_modules/deprecation/dist-node/index.js","..
/webpack://test-check/./node_modules/end-of-stream/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/compile.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/constants.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/expand.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/parse.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/stringify.js","../webpack://test-check/./node_modules/fast-glob/node_modules/braces/lib/utils.js","../webpack://test-check/./node_modules/fast-glob/node_modules/fill-range/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/is-number/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/micromatch/index.js","../webpack://test-check/./node_modules/fast-glob/node_modules/to-regex-range/index.js","../webpack://test-check/./node_modules/fast-glob/out/index.js","../webpack://test-check/./node_modules/fast-glob/out/managers/tasks.js","../webpack://test-check/./node_modules/fast-glob/out/providers/async.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/deep.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/entry.js","../webpack://test-check/./node_modules/fast-glob/out/providers/filters/error.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/matcher.js","../webpack://test-check/./node_modules/fast-glob/out/providers/matchers/partial.js","../webpack://test-check/./node_modules/fast-glob/out/providers/provider.js","../webpack://test-check/./node_modules/fast-glob/out/providers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/providers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/providers/transformers/entry.js","../webpack://test-check/./node_modules/fast-glob/out/readers/reader.js","../webpack://test-check/./node_modules/fast-glob/out/readers/stream.js","../webpack://test-check/./node_modules/fast-glob/out/readers/sync.js","../webpack://test-check/./node_modules/fast-glob/out/settings.js","../webpack://test-check/./node_modules/fast-glob/out/utils/array.js","../webpack://test-check/./node_modules/fast-glob/out/utils/errno.js","../webpack://test-check/./node_modules/fast-glob/out/utils/fs.js","../webpack://test-check/./node_modules/fast-glob/out/utils/index.js","../webpack://test-check/./node_modules/fast-glob/out/utils/path.js","../webpack://test-check/./node_modules/fast-glob/out/utils/pattern.js","../webpack://test-check/./node_modules/fast-glob/out/utils/stream.js","../webpack://test-check/./node_modules/fast-glob/out/utils/string.js","../webpack://test-check/./node_modules/fastq/queue.js","../webpack://test-check/./node_modules/glob-parent/index.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/create-rejection.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/index.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/normalize-arguments.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/parse-body.js","../webpack://test-check/./node_modules/got/dist/source/as-promise/types.js","../webpack://test-check/./node_modules/got/dist/source/core/calculate-retry-delay.js","../webpack://test-check/./node_modules/got/dist/source/core/index.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/dns-ip-version.js","
../webpack://test-check/./node_modules/got/dist/source/core/utils/get-body-size.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/get-buffer.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/is-form-data.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/is-response-ok.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/options-to-url.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/proxy-events.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/timed-out.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/unhandle.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/url-to-options.js","../webpack://test-check/./node_modules/got/dist/source/core/utils/weakable-map.js","../webpack://test-check/./node_modules/got/dist/source/create.js","../webpack://test-check/./node_modules/got/dist/source/index.js","../webpack://test-check/./node_modules/got/dist/source/types.js","../webpack://test-check/./node_modules/got/dist/source/utils/deep-freeze.js","../webpack://test-check/./node_modules/got/dist/source/utils/deprecation-warning.js","../webpack://test-check/./node_modules/http-cache-semantics/index.js","../webpack://test-check/./node_modules/http2-wrapper/source/agent.js","../webpack://test-check/./node_modules/http2-wrapper/source/auto.js","../webpack://test-check/./node_modules/http2-wrapper/source/client-request.js","../webpack://test-check/./node_modules/http2-wrapper/source/incoming-message.js","../webpack://test-check/./node_modules/http2-wrapper/source/index.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/calculate-server-name.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/errors.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/is-request-pseudo-header.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/proxy-events.js","../webpack://test-check/./node_modules/http2-wrapper/source/utils/url-to-options.js","../webpack://test-check/./node_modules/is-extglob/index.js","../webpack://test-check/./node_modules/is-glob/index.js","../webpack://test-check/./node_modules/json-buffer/index.js","../webpack://test-check/./node_modules/keyv/src/index.js","../webpack://test-check/./node_modules/lowercase-keys/index.js","../webpack://test-check/./node_modules/merge2/index.js","../webpack://test-check/./node_modules/mimic-response/index.js","../webpack://test-check/./node_modules/node-fetch/lib/index.js","../webpack://test-check/./node_modules/normalize-url/index.js","../webpack://test-check/./node_modules/once/once.js","../webpack://test-check/./node_modules/p-cancelable/index.js","../webpack://test-check/./node_modules/picomatch/index.js","../webpack://test-check/./node_modules/picomatch/lib/constants.js","../webpack://test-check/./node_modules/picomatch/lib/parse.js","../webpack://test-check/./node_modules/picomatch/lib/picomatch.js","../webpack://test-check/./node_modules/picomatch/lib/scan.js","../webpack://test-check/./node_modules/picomatch/lib/utils.js","../webpack://test-check/./node_modules/pump/index.js","../webpack://test-check/./node_modules/quick-lru/index.js","../webpack://test-check/./node_modules/resolve-alpn/index.js","../webpack://test-check/./node_modules/responselike/src/index.js","../webpack://test-check/./node_modules/reusify/reusify.js","../webpack://test-check/./node_modules/run-parallel/index.js","../webpack://test-check/./node_modules/sax/l
ib/sax.js","../webpack://test-check/./node_modules/tunnel/index.js","../webpack://test-check/./node_modules/tunnel/lib/tunnel.js","../webpack://test-check/./node_modules/universal-user-agent/dist-node/index.js","../webpack://test-check/./node_modules/wrappy/wrappy.js","../webpack://test-check/./node_modules/xml2js/lib/bom.js","../webpack://test-check/./node_modules/xml2js/lib/builder.js","../webpack://test-check/./node_modules/xml2js/lib/defaults.js","../webpack://test-check/./node_modules/xml2js/lib/parser.js","../webpack://test-check/./node_modules/xml2js/lib/processors.js","../webpack://test-check/./node_modules/xml2js/lib/xml2js.js","../webpack://test-check/./node_modules/xmlbuilder/lib/DocumentPosition.js","../webpack://test-check/./node_modules/xmlbuilder/lib/NodeType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/Utility.js","../webpack://test-check/./node_modules/xmlbuilder/lib/WriterState.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLAttribute.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLCharacterData.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLComment.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMConfiguration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMErrorHandler.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMImplementation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDOMStringList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDAttList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDEntity.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDTDNotation.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDeclaration.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocType.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocument.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDocumentCB.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLDummy.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLElement.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNamedNodeMap.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNode.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLNodeList.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLProcessingInstruction.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLRaw.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStreamWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringWriter.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLStringifier.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLText.js","../webpack://test-check/./node_modules/xmlbuilder/lib/XMLWriterBase.js","../webpack://test-check/./node_modules/xmlbuilder/lib/index.js","../webpack://test-check/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://test-check/external \"assert\"","../webpack://test-check/external \"buffer\"","../webpack://test-check/external \"child_process\"","../webpack://test-check/external \"dns\"","../webpack://test-check/external \"events\"","../webpack://test-check/external \"fs\"","../webpack://test-check/external \"http\"","../webpack://test-check/external \"http2\"","../webpack://test-check/external 
\"https\"","../webpack://test-check/external \"net\"","../webpack://test-check/external \"os\"","../webpack://test-check/external \"path\"","../webpack://test-check/external \"stream\"","../webpack://test-check/external \"string_decoder\"","../webpack://test-check/external \"timers\"","../webpack://test-check/external \"tls\"","../webpack://test-check/external \"url\"","../webpack://test-check/external \"util\"","../webpack://test-check/external \"zlib\"","../webpack://test-check/webpack/bootstrap","../webpack://test-check/webpack/runtime/compat","../webpack://test-check/webpack/startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ArtifactProvider = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst adm_zip_1 = __importDefault(require(\"adm-zip\"));\nconst picomatch_1 = __importDefault(require(\"picomatch\"));\nconst github_utils_1 = require(\"../utils/github-utils\");\nclass ArtifactProvider {\n constructor(octokit, artifact, name, pattern, sha, runId, token) {\n this.octokit = octokit;\n this.artifact = artifact;\n this.name = name;\n this.pattern = pattern;\n this.sha = sha;\n this.runId = runId;\n this.token = token;\n if (this.artifact.startsWith('/')) {\n const endIndex = this.artifact.lastIndexOf('/');\n const rePattern = this.artifact.substring(1, endIndex);\n const reOpts = this.artifact.substring(endIndex + 1);\n const re = new RegExp(rePattern, reOpts);\n this.artifactNameMatch = (str) => re.test(str);\n this.getReportName = (str) => {\n const match = str.match(re);\n if (match === null) {\n throw new Error(`Artifact name '${str}' does not match regex ${this.artifact}`);\n }\n let reportName = this.name;\n for (let i = 1; i < match.length; i++) {\n reportName = reportName.replace(new RegExp(`\\\\$${i}`, 'g'), match[i]);\n }\n return reportName;\n };\n }\n else {\n this.artifactNameMatch = (str) => str === this.artifact;\n this.getReportName = () => this.name;\n }\n this.fileNameMatch = picomatch_1.default(pattern);\n }\n async load() {\n const result = {};\n const resp = await this.octokit.actions.listWorkflowRunArtifacts({\n ...github.context.repo,\n run_id: this.runId\n });\n if (resp.data.artifacts.length === 0) {\n core.warning(`No artifacts found in run ${this.runId}`);\n return {};\n }\n const artifacts = resp.data.artifacts.filter(a => this.artifactNameMatch(a.name));\n if (artifacts.length === 0) {\n core.warning(`No artifact matches 
${this.artifact}`);\n return {};\n }\n for (const art of artifacts) {\n const fileName = `${art.name}.zip`;\n await github_utils_1.downloadArtifact(this.octokit, art.id, fileName, this.token);\n core.startGroup(`Reading archive ${fileName}`);\n try {\n const reportName = this.getReportName(art.name);\n core.info(`Report name: ${reportName}`);\n const files = [];\n const zip = new adm_zip_1.default(fileName);\n for (const entry of zip.getEntries()) {\n const file = entry.entryName;\n if (entry.isDirectory) {\n core.info(`Skipping ${file}: entry is a directory`);\n continue;\n }\n if (!this.fileNameMatch(file)) {\n core.info(`Skipping ${file}: filename does not match pattern`);\n continue;\n }\n const content = zip.readAsText(entry);\n files.push({ file, content });\n core.info(`Read ${file}: ${content.length} chars`);\n }\n if (result[reportName]) {\n result[reportName].push(...files);\n }\n else {\n result[reportName] = files;\n }\n }\n finally {\n core.endGroup();\n }\n }\n return result;\n }\n async listTrackedFiles() {\n return github_utils_1.listFiles(this.octokit, this.sha);\n }\n}\nexports.ArtifactProvider = ArtifactProvider;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.LocalFileProvider = void 0;\nconst fs = __importStar(require(\"fs\"));\nconst fast_glob_1 = __importDefault(require(\"fast-glob\"));\nconst git_1 = require(\"../utils/git\");\nclass LocalFileProvider {\n constructor(name, pattern) {\n this.name = name;\n this.pattern = pattern;\n }\n async load() {\n const result = [];\n for (const pat of this.pattern) {\n const paths = await fast_glob_1.default(pat, { dot: true });\n for (const file of paths) {\n const content = await fs.promises.readFile(file, { encoding: 'utf8' });\n result.push({ file, content });\n }\n }\n return { [this.name]: result };\n }\n async listTrackedFiles() {\n return git_1.listFiles();\n }\n}\nexports.LocalFileProvider = LocalFileProvider;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst artifact_provider_1 = require(\"./input-providers/artifact-provider\");\nconst local_file_provider_1 = require(\"./input-providers/local-file-provider\");\nconst get_annotations_1 = require(\"./report/get-annotations\");\nconst get_report_1 = require(\"./report/get-report\");\nconst dart_json_parser_1 = require(\"./parsers/dart-json/dart-json-parser\");\nconst dotnet_trx_parser_1 = require(\"./parsers/dotnet-trx/dotnet-trx-parser\");\nconst java_junit_parser_1 = require(\"./parsers/java-junit/java-junit-parser\");\nconst jest_junit_parser_1 = require(\"./parsers/jest-junit/jest-junit-parser\");\nconst mocha_json_parser_1 = require(\"./parsers/mocha-json/mocha-json-parser\");\nconst path_utils_1 = require(\"./utils/path-utils\");\nconst github_utils_1 = require(\"./utils/github-utils\");\nasync function main() {\n try {\n const testReporter = new TestReporter();\n await testReporter.run();\n }\n catch (error) {\n core.setFailed(error.message);\n }\n}\nclass TestReporter {\n constructor() {\n this.artifact = core.getInput('artifact', { required: false });\n this.name = core.getInput('name', { required: true });\n this.path = core.getInput('path', { required: true });\n this.pathReplaceBackslashes = core.getInput('path-replace-backslashes', { required: false }) === 'true';\n this.reporter = core.getInput('reporter', { required: true });\n this.listSuites = core.getInput('list-suites', { required: true });\n this.listTests = core.getInput('list-tests', { required: true });\n this.maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));\n this.failOnError = core.getInput('fail-on-error', { required: true }) === 'true';\n this.workDirInput = core.getInput('working-directory', { required: false });\n this.onlySummary = core.getInput('only-summary', { required: false }) === 'true';\n this.token = core.getInput('token', { required: true });\n this.context = github_utils_1.getCheckRunContext();\n this.octokit = github.getOctokit(this.token);\n if (this.listSuites !== 'all' && this.listSuites !== 'failed') {\n core.setFailed(`Input parameter 'list-suites' has invalid value`);\n return;\n }\n if (this.listTests !== 'all' && this.listTests !== 'failed' && this.listTests !== 'none') {\n core.setFailed(`Input parameter 'list-tests' has invalid value`);\n return;\n }\n if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {\n core.setFailed(`Input parameter 'max-annotations' has invalid value`);\n return;\n }\n }\n async run() {\n if (this.workDirInput) {\n core.info(`Changing directory to '${this.workDirInput}'`);\n process.chdir(this.workDirInput);\n }\n core.info(`Check runs will be created with SHA=${this.context.sha}`);\n // Split path pattern by ',' and optionally convert all backslashes to forward slashes\n // fast-glob (micromatch) always interprets backslashes as escape characters instead of 
directory separators\n const pathsList = this.path.split(',');\n const pattern = this.pathReplaceBackslashes ? pathsList.map(path_utils_1.normalizeFilePath) : pathsList;\n const inputProvider = this.artifact\n ? new artifact_provider_1.ArtifactProvider(this.octokit, this.artifact, this.name, pattern, this.context.sha, this.context.runId, this.token)\n : new local_file_provider_1.LocalFileProvider(this.name, pattern);\n const parseErrors = this.maxAnnotations > 0;\n const trackedFiles = await inputProvider.listTrackedFiles();\n const workDir = this.artifact ? undefined : path_utils_1.normalizeDirPath(process.cwd(), true);\n core.info(`Found ${trackedFiles.length} files tracked by GitHub`);\n const options = {\n workDir,\n trackedFiles,\n parseErrors\n };\n core.info(`Using test report parser '${this.reporter}'`);\n const parser = this.getParser(this.reporter, options);\n const results = [];\n const input = await inputProvider.load();\n for (const [reportName, files] of Object.entries(input)) {\n try {\n core.startGroup(`Creating test report ${reportName}`);\n const tr = await this.createReport(parser, reportName, files);\n results.push(...tr);\n }\n finally {\n core.endGroup();\n }\n }\n const isFailed = results.some(tr => tr.result === 'failed');\n const conclusion = isFailed ? 'failure' : 'success';\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\n const time = results.reduce((sum, tr) => sum + tr.time, 0);\n core.setOutput('conclusion', conclusion);\n core.setOutput('passed', passed);\n core.setOutput('failed', failed);\n core.setOutput('skipped', skipped);\n core.setOutput('time', time);\n if (this.failOnError && isFailed) {\n core.setFailed(`Failed test were found and 'fail-on-error' option is set to ${this.failOnError}`);\n return;\n }\n if (results.length === 0) {\n core.setFailed(`No test report files were found`);\n return;\n }\n }\n async createReport(parser, name, files) {\n if (files.length === 0) {\n core.warning(`No file matches path ${this.path}`);\n return [];\n }\n const results = [];\n for (const { file, content } of files) {\n core.info(`Processing test results from ${file}`);\n const tr = await parser.parse(file, content);\n results.push(tr);\n }\n core.info(`Creating check run ${name}`);\n const createResp = await this.octokit.checks.create({\n head_sha: this.context.sha,\n name,\n status: 'in_progress',\n output: {\n title: name,\n summary: ''\n },\n ...github.context.repo\n });\n core.info('Creating report summary');\n const { listSuites, listTests, onlySummary } = this;\n const baseUrl = createResp.data.html_url;\n const summary = get_report_1.getReport(results, { listSuites, listTests, baseUrl, onlySummary });\n core.info('Creating annotations');\n const annotations = get_annotations_1.getAnnotations(results, this.maxAnnotations);\n const isFailed = results.some(tr => tr.result === 'failed');\n const conclusion = isFailed ? 
'failure' : 'success';\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\n const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `;\n core.info(`Updating check run conclusion (${conclusion}) and output`);\n const resp = await this.octokit.checks.update({\n check_run_id: createResp.data.id,\n conclusion,\n status: 'completed',\n output: {\n title: shortSummary,\n summary,\n annotations\n },\n ...github.context.repo\n });\n core.info(`Check run create response: ${resp.status}`);\n core.info(`Check run URL: ${resp.data.url}`);\n core.info(`Check run HTML: ${resp.data.html_url}`);\n return results;\n }\n getParser(reporter, options) {\n switch (reporter) {\n case 'dart-json':\n return new dart_json_parser_1.DartJsonParser(options, 'dart');\n case 'dotnet-trx':\n return new dotnet_trx_parser_1.DotnetTrxParser(options);\n case 'flutter-json':\n return new dart_json_parser_1.DartJsonParser(options, 'flutter');\n case 'java-junit':\n return new java_junit_parser_1.JavaJunitParser(options);\n case 'jest-junit':\n return new jest_junit_parser_1.JestJunitParser(options);\n case 'mocha-json':\n return new mocha_json_parser_1.MochaJsonParser(options);\n default:\n throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`);\n }\n }\n}\nmain();\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DartJsonParser = void 0;\nconst path_utils_1 = require(\"../../utils/path-utils\");\nconst dart_json_types_1 = require(\"./dart-json-types\");\nconst test_results_1 = require(\"../../test-results\");\nclass TestRun {\n constructor(path, suites, success, time) {\n this.path = path;\n this.suites = suites;\n this.success = success;\n this.time = time;\n }\n}\nclass TestSuite {\n constructor(suite) {\n this.suite = suite;\n this.groups = {};\n }\n}\nclass TestGroup {\n constructor(group) {\n this.group = group;\n this.tests = [];\n }\n}\nclass TestCase {\n constructor(testStart) {\n this.testStart = testStart;\n this.print = [];\n this.groupId = testStart.test.groupIDs[testStart.test.groupIDs.length - 1];\n }\n get result() {\n var _a, _b, _c, _d;\n if ((_a = this.testDone) === null || _a === void 0 ? void 0 : _a.skipped) {\n return 'skipped';\n }\n if (((_b = this.testDone) === null || _b === void 0 ? void 0 : _b.result) === 'success') {\n return 'success';\n }\n if (((_c = this.testDone) === null || _c === void 0 ? void 0 : _c.result) === 'error' || ((_d = this.testDone) === null || _d === void 0 ? void 0 : _d.result) === 'failure') {\n return 'failed';\n }\n return undefined;\n }\n get time() {\n return this.testDone !== undefined ? this.testDone.time - this.testStart.time : 0;\n }\n}\nclass DartJsonParser {\n constructor(options, sdk) {\n this.options = options;\n this.sdk = sdk;\n }\n async parse(path, content) {\n const tr = this.getTestRun(path, content);\n const result = this.getTestRunResult(tr);\n return Promise.resolve(result);\n }\n getTestRun(path, content) {\n const lines = content.split(/\\n\\r?/g);\n const events = lines\n .map((str, i) => {\n if (str.trim() === '') {\n return null;\n }\n try {\n return JSON.parse(str);\n }\n catch (e) {\n const col = e.columnNumber !== undefined ? 
`:${e.columnNumber}` : '';\n throw new Error(`Invalid JSON at ${path}:${i + 1}${col}\\n\\n${e}`);\n }\n })\n .filter(evt => evt != null);\n let success = false;\n let totalTime = 0;\n const suites = {};\n const tests = {};\n for (const evt of events) {\n if (dart_json_types_1.isSuiteEvent(evt)) {\n suites[evt.suite.id] = new TestSuite(evt.suite);\n }\n else if (dart_json_types_1.isGroupEvent(evt)) {\n suites[evt.group.suiteID].groups[evt.group.id] = new TestGroup(evt.group);\n }\n else if (dart_json_types_1.isTestStartEvent(evt) && evt.test.url !== null) {\n const test = new TestCase(evt);\n const suite = suites[evt.test.suiteID];\n const group = suite.groups[evt.test.groupIDs[evt.test.groupIDs.length - 1]];\n group.tests.push(test);\n tests[evt.test.id] = test;\n }\n else if (dart_json_types_1.isTestDoneEvent(evt) && !evt.hidden && tests[evt.testID]) {\n tests[evt.testID].testDone = evt;\n }\n else if (dart_json_types_1.isErrorEvent(evt) && tests[evt.testID]) {\n tests[evt.testID].error = evt;\n }\n else if (dart_json_types_1.isMessageEvent(evt) && tests[evt.testID]) {\n tests[evt.testID].print.push(evt);\n }\n else if (dart_json_types_1.isDoneEvent(evt)) {\n success = evt.success;\n totalTime = evt.time;\n }\n }\n return new TestRun(path, Object.values(suites), success, totalTime);\n }\n getTestRunResult(tr) {\n const suites = tr.suites.map(s => {\n return new test_results_1.TestSuiteResult(this.getRelativePath(s.suite.path), this.getGroups(s));\n });\n return new test_results_1.TestRunResult(tr.path, suites, tr.time);\n }\n getGroups(suite) {\n const groups = Object.values(suite.groups).filter(grp => grp.tests.length > 0);\n groups.sort((a, b) => { var _a, _b; return ((_a = a.group.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.group.line) !== null && _b !== void 0 ? _b : 0); });\n return groups.map(group => {\n group.tests.sort((a, b) => { var _a, _b; return ((_a = a.testStart.test.line) !== null && _a !== void 0 ? _a : 0) - ((_b = b.testStart.test.line) !== null && _b !== void 0 ? _b : 0); });\n const tests = group.tests.map(tc => {\n const error = this.getError(suite, tc);\n const testName = group.group.name !== undefined && tc.testStart.test.name.startsWith(group.group.name)\n ? tc.testStart.test.name.slice(group.group.name.length).trim()\n : tc.testStart.test.name.trim();\n return new test_results_1.TestCaseResult(testName, tc.result, tc.time, error);\n });\n return new test_results_1.TestGroupResult(group.group.name, tests);\n });\n }\n getError(testSuite, test) {\n var _a, _b, _c, _d, _e, _f;\n if (!this.options.parseErrors || !test.error) {\n return undefined;\n }\n const { trackedFiles } = this.options;\n const stackTrace = (_b = (_a = test.error) === null || _a === void 0 ? void 0 : _a.stackTrace) !== null && _b !== void 0 ? _b : '';\n const print = test.print\n .filter(p => p.messageType === 'print')\n .map(p => p.message)\n .join('\\n');\n const details = [print, stackTrace].filter(str => str !== '').join('\\n');\n const src = this.exceptionThrowSource(details, trackedFiles);\n const message = this.getErrorMessage((_d = (_c = test.error) === null || _c === void 0 ? void 0 : _c.error) !== null && _d !== void 0 ? _d : '', print);\n let path;\n let line;\n if (src !== undefined) {\n path = src.path;\n line = src.line;\n }\n else {\n const testStartPath = this.getRelativePath(testSuite.suite.path);\n if (trackedFiles.includes(testStartPath)) {\n path = testStartPath;\n line = (_f = (_e = test.testStart.test.root_line) !== null && _e !== void 0 ? 
_e : test.testStart.test.line) !== null && _f !== void 0 ? _f : undefined;\n }\n }\n return {\n path,\n line,\n message,\n details\n };\n }\n getErrorMessage(message, print) {\n if (this.sdk === 'flutter') {\n const uselessMessageRe = /^Test failed\\. See exception logs above\\.\\nThe test description was:/m;\n const flutterPrintRe = /^══╡ EXCEPTION CAUGHT BY FLUTTER TEST FRAMEWORK ╞═+\\s+(.*)\\s+When the exception was thrown, this was the stack:/ms;\n if (uselessMessageRe.test(message)) {\n const match = print.match(flutterPrintRe);\n if (match !== null) {\n return match[1];\n }\n }\n }\n return message || print;\n }\n exceptionThrowSource(ex, trackedFiles) {\n const lines = ex.split(/\\r?\\n/g);\n // regexp to extract file path and line number from stack trace\n const dartRe = /^(?!package:)(.*)\\s+(\\d+):\\d+\\s+/;\n const flutterRe = /^#\\d+\\s+.*\\((?!package:)(.*):(\\d+):\\d+\\)$/;\n const re = this.sdk === 'dart' ? dartRe : flutterRe;\n for (const str of lines) {\n const match = str.match(re);\n if (match !== null) {\n const [_, pathStr, lineStr] = match;\n const path = path_utils_1.normalizeFilePath(this.getRelativePath(pathStr));\n if (trackedFiles.includes(path)) {\n const line = parseInt(lineStr);\n return { path, line };\n }\n }\n }\n }\n getRelativePath(path) {\n const prefix = 'file://';\n if (path.startsWith(prefix)) {\n path = path.substr(prefix.length);\n }\n path = path_utils_1.normalizeFilePath(path);\n const workDir = this.getWorkDir(path);\n if (workDir !== undefined && path.startsWith(workDir)) {\n path = path.substr(workDir.length);\n }\n return path;\n }\n getWorkDir(path) {\n var _a, _b;\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\n }\n}\nexports.DartJsonParser = DartJsonParser;\n","\"use strict\";\n/// reflects documentation at https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isMessageEvent = exports.isDoneEvent = exports.isErrorEvent = exports.isTestDoneEvent = exports.isTestStartEvent = exports.isGroupEvent = exports.isSuiteEvent = void 0;\nfunction isSuiteEvent(event) {\n return event.type === 'suite';\n}\nexports.isSuiteEvent = isSuiteEvent;\nfunction isGroupEvent(event) {\n return event.type === 'group';\n}\nexports.isGroupEvent = isGroupEvent;\nfunction isTestStartEvent(event) {\n return event.type === 'testStart';\n}\nexports.isTestStartEvent = isTestStartEvent;\nfunction isTestDoneEvent(event) {\n return event.type === 'testDone';\n}\nexports.isTestDoneEvent = isTestDoneEvent;\nfunction isErrorEvent(event) {\n return event.type === 'error';\n}\nexports.isErrorEvent = isErrorEvent;\nfunction isDoneEvent(event) {\n return event.type === 'done';\n}\nexports.isDoneEvent = isDoneEvent;\nfunction isMessageEvent(event) {\n return event.type === 'print';\n}\nexports.isMessageEvent = isMessageEvent;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.DotnetTrxParser = void 0;\nconst xml2js_1 = require(\"xml2js\");\nconst path_utils_1 = require(\"../../utils/path-utils\");\nconst parse_utils_1 = require(\"../../utils/parse-utils\");\nconst test_results_1 = require(\"../../test-results\");\nclass TestClass {\n constructor(name) {\n this.name = name;\n this.tests = [];\n }\n}\nclass Test {\n constructor(name, outcome, duration, error) {\n this.name = 
name;\n this.outcome = outcome;\n this.duration = duration;\n this.error = error;\n }\n get result() {\n switch (this.outcome) {\n case 'Passed':\n return 'success';\n case 'NotExecuted':\n return 'skipped';\n case 'Failed':\n return 'failed';\n }\n }\n}\nclass DotnetTrxParser {\n constructor(options) {\n this.options = options;\n }\n async parse(path, content) {\n const trx = await this.getTrxReport(path, content);\n const tc = this.getTestClasses(trx);\n const tr = this.getTestRunResult(path, trx, tc);\n tr.sort(true);\n return tr;\n }\n async getTrxReport(path, content) {\n try {\n return (await xml2js_1.parseStringPromise(content));\n }\n catch (e) {\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\n }\n }\n getTestClasses(trx) {\n if (trx.TestRun.TestDefinitions === undefined || trx.TestRun.Results === undefined) {\n return [];\n }\n const unitTests = {};\n for (const td of trx.TestRun.TestDefinitions) {\n for (const ut of td.UnitTest) {\n unitTests[ut.$.id] = ut;\n }\n }\n const unitTestsResults = trx.TestRun.Results.flatMap(r => r.UnitTestResult).flatMap(result => ({\n result,\n test: unitTests[result.$.testId]\n }));\n const testClasses = {};\n for (const r of unitTestsResults) {\n const className = r.test.TestMethod[0].$.className;\n let tc = testClasses[className];\n if (tc === undefined) {\n tc = new TestClass(className);\n testClasses[tc.name] = tc;\n }\n const error = this.getErrorInfo(r.result);\n const durationAttr = r.result.$.duration;\n const duration = durationAttr ? parse_utils_1.parseNetDuration(durationAttr) : 0;\n const resultTestName = r.result.$.testName;\n const testName = resultTestName.startsWith(className) && resultTestName[className.length] === '.'\n ? resultTestName.substr(className.length + 1)\n : resultTestName;\n const test = new Test(testName, r.result.$.outcome, duration, error);\n tc.tests.push(test);\n }\n const result = Object.values(testClasses);\n return result;\n }\n getTestRunResult(path, trx, testClasses) {\n const times = trx.TestRun.Times[0].$;\n const totalTime = parse_utils_1.parseIsoDate(times.finish).getTime() - parse_utils_1.parseIsoDate(times.start).getTime();\n const suites = testClasses.map(testClass => {\n const tests = testClass.tests.map(test => {\n const error = this.getError(test);\n return new test_results_1.TestCaseResult(test.name, test.result, test.duration, error);\n });\n const group = new test_results_1.TestGroupResult(null, tests);\n return new test_results_1.TestSuiteResult(testClass.name, [group]);\n });\n return new test_results_1.TestRunResult(path, suites, totalTime);\n }\n getErrorInfo(testResult) {\n var _a;\n if (testResult.$.outcome !== 'Failed') {\n return undefined;\n }\n const output = testResult.Output;\n const error = (output === null || output === void 0 ? void 0 : output.length) > 0 && ((_a = output[0].ErrorInfo) === null || _a === void 0 ? void 0 : _a.length) > 0 ? 
output[0].ErrorInfo[0] : undefined;\n return error;\n }\n getError(test) {\n if (!this.options.parseErrors || !test.error) {\n return undefined;\n }\n const error = test.error;\n if (!Array.isArray(error.Message) ||\n error.Message.length === 0 ||\n !Array.isArray(error.StackTrace) ||\n error.StackTrace.length === 0) {\n return undefined;\n }\n const message = test.error.Message[0];\n const stackTrace = test.error.StackTrace[0];\n let path;\n let line;\n const src = this.exceptionThrowSource(stackTrace);\n if (src) {\n path = src.path;\n line = src.line;\n }\n return {\n path,\n line,\n message,\n details: `${message}\\n${stackTrace}`\n };\n }\n exceptionThrowSource(stackTrace) {\n const lines = stackTrace.split(/\\r*\\n/);\n const re = / in (.+):line (\\d+)$/;\n const { trackedFiles } = this.options;\n for (const str of lines) {\n const match = str.match(re);\n if (match !== null) {\n const [_, fileStr, lineStr] = match;\n const filePath = path_utils_1.normalizeFilePath(fileStr);\n const workDir = this.getWorkDir(filePath);\n if (workDir) {\n const file = filePath.substr(workDir.length);\n if (trackedFiles.includes(file)) {\n const line = parseInt(lineStr);\n return { path: file, line };\n }\n }\n }\n }\n }\n getWorkDir(path) {\n var _a, _b;\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\n }\n}\nexports.DotnetTrxParser = DotnetTrxParser;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JavaJunitParser = void 0;\nconst path = __importStar(require(\"path\"));\nconst xml2js_1 = require(\"xml2js\");\nconst path_utils_1 = require(\"../../utils/path-utils\");\nconst test_results_1 = require(\"../../test-results\");\nclass JavaJunitParser {\n constructor(options) {\n var _a;\n this.options = options;\n // Map to efficient lookup of all paths with given file name\n this.trackedFiles = {};\n for (const filePath of options.trackedFiles) {\n const fileName = path.basename(filePath);\n const files = (_a = this.trackedFiles[fileName]) !== null && _a !== void 0 ? 
_a : (this.trackedFiles[fileName] = []);\n files.push(path_utils_1.normalizeFilePath(filePath));\n }\n }\n async parse(filePath, content) {\n const reportOrSuite = await this.getJunitReport(filePath, content);\n const isReport = reportOrSuite.testsuites !== undefined;\n // XML might contain:\n // - multiple suites under <testsuites> root node\n // - single <testsuite> as root node\n let ju;\n if (isReport) {\n ju = reportOrSuite;\n }\n else {\n // Make it behave the same way as if suite was inside <testsuites> root node\n const suite = reportOrSuite.testsuite;\n ju = {\n testsuites: {\n $: { time: suite.$.time },\n testsuite: [suite]\n }\n };\n }\n return this.getTestRunResult(filePath, ju);\n }\n async getJunitReport(filePath, content) {\n try {\n return await xml2js_1.parseStringPromise(content);\n }\n catch (e) {\n throw new Error(`Invalid XML at ${filePath}\\n\\n${e}`);\n }\n }\n getTestRunResult(filePath, junit) {\n var _a;\n const suites = junit.testsuites.testsuite === undefined\n ? []\n : junit.testsuites.testsuite.map(ts => {\n const name = ts.$.name.trim();\n const time = parseFloat(ts.$.time) * 1000;\n const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);\n return sr;\n });\n const seconds = parseFloat((_a = junit.testsuites.$) === null || _a === void 0 ? void 0 : _a.time);\n const time = isNaN(seconds) ? undefined : seconds * 1000;\n return new test_results_1.TestRunResult(filePath, suites, time);\n }\n getGroups(suite) {\n if (suite.testcase === undefined) {\n return [];\n }\n const groups = [];\n for (const tc of suite.testcase) {\n // Normally classname is same as suite name - both refer to same Java class\n // Therefore it doesn't make sense to process it as a group\n // and tests will be added to default group with empty name\n const className = tc.$.classname === suite.$.name ? '' : tc.$.classname;\n let grp = groups.find(g => g.name === className);\n if (grp === undefined) {\n grp = { name: className, tests: [] };\n groups.push(grp);\n }\n grp.tests.push(tc);\n }\n return groups.map(grp => {\n const tests = grp.tests.map(tc => {\n const name = tc.$.name.trim();\n const result = this.getTestCaseResult(tc);\n const time = parseFloat(tc.$.time) * 1000;\n const error = this.getTestCaseError(tc);\n return new test_results_1.TestCaseResult(name, result, time, error);\n });\n return new test_results_1.TestGroupResult(grp.name, tests);\n });\n }\n getTestCaseResult(test) {\n if (test.failure || test.error)\n return 'failed';\n if (test.skipped)\n return 'skipped';\n return 'success';\n }\n getTestCaseError(tc) {\n var _a;\n if (!this.options.parseErrors) {\n return undefined;\n }\n // We process <error> and <failure> the same way\n const failures = (_a = tc.failure) !== null && _a !== void 0 ? _a : tc.error;\n if (!failures) {\n return undefined;\n }\n const failure = failures[0];\n const details = typeof failure === 'object' ? failure._ : failure;\n let filePath;\n let line;\n const src = this.exceptionThrowSource(details);\n if (src) {\n filePath = src.filePath;\n line = src.line;\n }\n return {\n path: filePath,\n line,\n details,\n message: typeof failure === 'object' ? 
failure.message : undefined\n };\n }\n exceptionThrowSource(stackTrace) {\n const lines = stackTrace.split(/\\r?\\n/);\n const re = /^at (.*)\\((.*):(\\d+)\\)$/;\n for (const str of lines) {\n const match = str.match(re);\n if (match !== null) {\n const [_, tracePath, fileName, lineStr] = match;\n const filePath = this.getFilePath(tracePath, fileName);\n if (filePath !== undefined) {\n const line = parseInt(lineStr);\n return { filePath, line };\n }\n }\n }\n }\n // Stacktrace in Java doesn't contain full paths to source file.\n // There are only package, file name and line.\n // Assuming folder structure matches package name (as it should in Java),\n // we can try to match tracked file.\n getFilePath(tracePath, fileName) {\n // Check if there is any tracked file with given name\n const files = this.trackedFiles[fileName];\n if (files === undefined) {\n return undefined;\n }\n // Remove class name and method name from trace.\n // Take parts until first item with capital letter - package names are lowercase while class name is CamelCase.\n const packageParts = tracePath.split(/\\./g);\n const packageIndex = packageParts.findIndex(part => part[0] <= 'Z');\n if (packageIndex !== -1) {\n packageParts.splice(packageIndex, packageParts.length - packageIndex);\n }\n if (packageParts.length === 0) {\n return undefined;\n }\n // Get right file\n // - file name matches\n // - parent folders structure must reflect the package name\n for (const filePath of files) {\n const dirs = path.dirname(filePath).split(/\\//g);\n if (packageParts.length > dirs.length) {\n continue;\n }\n // get only N parent folders, where N = length of package name parts\n if (dirs.length > packageParts.length) {\n dirs.splice(0, dirs.length - packageParts.length);\n }\n // check if parent folder structure matches package name\n const isMatch = packageParts.every((part, i) => part === dirs[i]);\n if (isMatch) {\n return filePath;\n }\n }\n return undefined;\n }\n}\nexports.JavaJunitParser = JavaJunitParser;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.JestJunitParser = void 0;\nconst xml2js_1 = require(\"xml2js\");\nconst node_utils_1 = require(\"../../utils/node-utils\");\nconst path_utils_1 = require(\"../../utils/path-utils\");\nconst test_results_1 = require(\"../../test-results\");\nclass JestJunitParser {\n constructor(options) {\n this.options = options;\n }\n async parse(path, content) {\n const ju = await this.getJunitReport(path, content);\n return this.getTestRunResult(path, ju);\n }\n async getJunitReport(path, content) {\n try {\n return (await xml2js_1.parseStringPromise(content));\n }\n catch (e) {\n throw new Error(`Invalid XML at ${path}\\n\\n${e}`);\n }\n }\n getTestRunResult(path, junit) {\n const suites = junit.testsuites.testsuite === undefined\n ? 
[]\n : junit.testsuites.testsuite.map(ts => {\n const name = ts.$.name.trim();\n const time = parseFloat(ts.$.time) * 1000;\n const sr = new test_results_1.TestSuiteResult(name, this.getGroups(ts), time);\n return sr;\n });\n const time = parseFloat(junit.testsuites.$.time) * 1000;\n return new test_results_1.TestRunResult(path, suites, time);\n }\n getGroups(suite) {\n const groups = [];\n for (const tc of suite.testcase) {\n let grp = groups.find(g => g.describe === tc.$.classname);\n if (grp === undefined) {\n grp = { describe: tc.$.classname, tests: [] };\n groups.push(grp);\n }\n grp.tests.push(tc);\n }\n return groups.map(grp => {\n const tests = grp.tests.map(tc => {\n const name = tc.$.name.trim();\n const result = this.getTestCaseResult(tc);\n const time = parseFloat(tc.$.time) * 1000;\n const error = this.getTestCaseError(tc);\n return new test_results_1.TestCaseResult(name, result, time, error);\n });\n return new test_results_1.TestGroupResult(grp.describe, tests);\n });\n }\n getTestCaseResult(test) {\n if (test.failure)\n return 'failed';\n if (test.skipped)\n return 'skipped';\n return 'success';\n }\n getTestCaseError(tc) {\n if (!this.options.parseErrors || !tc.failure) {\n return undefined;\n }\n const details = tc.failure[0];\n let path;\n let line;\n const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file));\n if (src) {\n path = src.path;\n line = src.line;\n }\n return {\n path,\n line,\n details\n };\n }\n getRelativePath(path) {\n path = path_utils_1.normalizeFilePath(path);\n const workDir = this.getWorkDir(path);\n if (workDir !== undefined && path.startsWith(workDir)) {\n path = path.substr(workDir.length);\n }\n return path;\n }\n getWorkDir(path) {\n var _a, _b;\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\n }\n}\nexports.JestJunitParser = JestJunitParser;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MochaJsonParser = void 0;\nconst test_results_1 = require(\"../../test-results\");\nconst node_utils_1 = require(\"../../utils/node-utils\");\nconst path_utils_1 = require(\"../../utils/path-utils\");\nclass MochaJsonParser {\n constructor(options) {\n this.options = options;\n }\n async parse(path, content) {\n const mocha = this.getMochaJson(path, content);\n const result = this.getTestRunResult(path, mocha);\n result.sort(true);\n return Promise.resolve(result);\n }\n getMochaJson(path, content) {\n try {\n return JSON.parse(content);\n }\n catch (e) {\n throw new Error(`Invalid JSON at ${path}\\n\\n${e}`);\n }\n }\n getTestRunResult(resultsPath, mocha) {\n const suitesMap = {};\n const getSuite = (test) => {\n var _a;\n const path = this.getRelativePath(test.file);\n return (_a = suitesMap[path]) !== null && _a !== void 0 ? 
_a : (suitesMap[path] = new test_results_1.TestSuiteResult(path, []));\n };\n for (const test of mocha.passes) {\n const suite = getSuite(test);\n this.processTest(suite, test, 'success');\n }\n for (const test of mocha.failures) {\n const suite = getSuite(test);\n this.processTest(suite, test, 'failed');\n }\n for (const test of mocha.pending) {\n const suite = getSuite(test);\n this.processTest(suite, test, 'skipped');\n }\n const suites = Object.values(suitesMap);\n return new test_results_1.TestRunResult(resultsPath, suites, mocha.stats.duration);\n }\n processTest(suite, test, result) {\n var _a;\n const groupName = test.fullTitle !== test.title\n ? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()\n : null;\n let group = suite.groups.find(grp => grp.name === groupName);\n if (group === undefined) {\n group = new test_results_1.TestGroupResult(groupName, []);\n suite.groups.push(group);\n }\n const error = this.getTestCaseError(test);\n const testCase = new test_results_1.TestCaseResult(test.title, result, (_a = test.duration) !== null && _a !== void 0 ? _a : 0, error);\n group.tests.push(testCase);\n }\n getTestCaseError(test) {\n const details = test.err.stack;\n const message = test.err.message;\n if (details === undefined) {\n return undefined;\n }\n let path;\n let line;\n const src = node_utils_1.getExceptionSource(details, this.options.trackedFiles, file => this.getRelativePath(file));\n if (src) {\n path = src.path;\n line = src.line;\n }\n return {\n path,\n line,\n message,\n details\n };\n }\n getRelativePath(path) {\n path = path_utils_1.normalizeFilePath(path);\n const workDir = this.getWorkDir(path);\n if (workDir !== undefined && path.startsWith(workDir)) {\n path = path.substr(workDir.length);\n }\n return path;\n }\n getWorkDir(path) {\n var _a, _b;\n return ((_b = (_a = this.options.workDir) !== null && _a !== void 0 ? _a : this.assumedWorkDir) !== null && _b !== void 0 ? _b : (this.assumedWorkDir = path_utils_1.getBasePath(path, this.options.trackedFiles)));\n }\n}\nexports.MochaJsonParser = MochaJsonParser;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getAnnotations = void 0;\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\nconst parse_utils_1 = require(\"../utils/parse-utils\");\nfunction getAnnotations(results, maxCount) {\n var _a, _b, _c, _d;\n if (maxCount === 0) {\n return [];\n }\n // Collect errors from TestRunResults\n // Merge duplicates if there are more test results files processed\n const errors = [];\n const mergeDup = results.length > 1;\n for (const tr of results) {\n for (const ts of tr.suites) {\n for (const tg of ts.groups) {\n for (const tc of tg.tests) {\n const err = tc.error;\n if (err === undefined) {\n continue;\n }\n const path = (_a = err.path) !== null && _a !== void 0 ? _a : tr.path;\n const line = (_b = err.line) !== null && _b !== void 0 ? _b : 0;\n if (mergeDup) {\n const dup = errors.find(e => path === e.path && line === e.line && err.details === e.details);\n if (dup !== undefined) {\n dup.testRunPaths.push(tr.path);\n continue;\n }\n }\n errors.push({\n testRunPaths: [tr.path],\n suiteName: ts.name,\n testName: tg.name ? `${tg.name} ► ${tc.name}` : tc.name,\n details: err.details,\n message: (_d = (_c = err.message) !== null && _c !== void 0 ? _c : parse_utils_1.getFirstNonEmptyLine(err.details)) !== null && _d !== void 0 ? 
_d : 'Test failed',\n path,\n line\n });\n }\n }\n }\n }\n // Limit number of created annotations\n errors.splice(maxCount + 1);\n const annotations = errors.map(e => {\n const message = [\n 'Failed test found in:',\n e.testRunPaths.map(p => ` ${p}`).join('\\n'),\n 'Error:',\n ident(markdown_utils_1.fixEol(e.message), ' ')\n ].join('\\n');\n return enforceCheckRunLimits({\n path: e.path,\n start_line: e.line,\n end_line: e.line,\n annotation_level: 'failure',\n title: `${e.suiteName} ► ${e.testName}`,\n raw_details: markdown_utils_1.fixEol(e.details),\n message\n });\n });\n return annotations;\n}\nexports.getAnnotations = getAnnotations;\nfunction enforceCheckRunLimits(err) {\n err.title = markdown_utils_1.ellipsis(err.title || '', 255);\n err.message = markdown_utils_1.ellipsis(err.message, 65535);\n if (err.raw_details) {\n err.raw_details = markdown_utils_1.ellipsis(err.raw_details, 65535);\n }\n return err;\n}\nfunction ident(text, prefix) {\n return text\n .split(/\\n/g)\n .map(line => prefix + line)\n .join('\\n');\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getReport = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst markdown_utils_1 = require(\"../utils/markdown-utils\");\nconst parse_utils_1 = require(\"../utils/parse-utils\");\nconst slugger_1 = require(\"../utils/slugger\");\nconst MAX_REPORT_LENGTH = 65535;\nconst defaultOptions = {\n listSuites: 'all',\n listTests: 'all',\n baseUrl: '',\n onlySummary: false\n};\nfunction getReport(results, options = defaultOptions) {\n core.info('Generating check run summary');\n applySort(results);\n const opts = { ...options };\n let lines = renderReport(results, opts);\n let report = lines.join('\\n');\n if (getByteLength(report) <= MAX_REPORT_LENGTH) {\n return report;\n }\n if (opts.listTests === 'all') {\n core.info(\"Test report summary is too big - setting 'listTests' to 'failed'\");\n opts.listTests = 'failed';\n lines = renderReport(results, opts);\n report = lines.join('\\n');\n if (getByteLength(report) <= MAX_REPORT_LENGTH) {\n return report;\n }\n }\n core.warning(`Test report summary exceeded limit of ${MAX_REPORT_LENGTH} bytes and will be trimmed`);\n return trimReport(lines);\n}\nexports.getReport = getReport;\nfunction trimReport(lines) {\n const closingBlock = '```';\n const errorMsg = `**Report exceeded GitHub limit of ${MAX_REPORT_LENGTH} bytes and has been trimmed**`;\n const maxErrorMsgLength = closingBlock.length + errorMsg.length + 2;\n const maxReportLength = MAX_REPORT_LENGTH - maxErrorMsgLength;\n let reportLength = 0;\n let codeBlock = false;\n let endLineIndex = 0;\n for (endLineIndex = 0; endLineIndex < 
lines.length; endLineIndex++) {\n const line = lines[endLineIndex];\n const lineLength = getByteLength(line);\n reportLength += lineLength + 1;\n if (reportLength > maxReportLength) {\n break;\n }\n if (line === '```') {\n codeBlock = !codeBlock;\n }\n }\n const reportLines = lines.slice(0, endLineIndex);\n if (codeBlock) {\n reportLines.push('```');\n }\n reportLines.push(errorMsg);\n return reportLines.join('\\n');\n}\nfunction applySort(results) {\n results.sort((a, b) => a.path.localeCompare(b.path));\n for (const res of results) {\n res.suites.sort((a, b) => a.name.localeCompare(b.name));\n }\n}\nfunction getByteLength(text) {\n return Buffer.byteLength(text, 'utf8');\n}\nfunction renderReport(results, options) {\n const sections = [];\n const badge = getReportBadge(results);\n sections.push(badge);\n const runs = getTestRunsReport(results, options);\n sections.push(...runs);\n return sections;\n}\nfunction getReportBadge(results) {\n const passed = results.reduce((sum, tr) => sum + tr.passed, 0);\n const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);\n const failed = results.reduce((sum, tr) => sum + tr.failed, 0);\n return getBadge(passed, failed, skipped);\n}\nfunction getBadge(passed, failed, skipped) {\n const text = [];\n if (passed > 0) {\n text.push(`${passed} passed`);\n }\n if (failed > 0) {\n text.push(`${failed} failed`);\n }\n if (skipped > 0) {\n text.push(`${skipped} skipped`);\n }\n const message = text.length > 0 ? text.join(', ') : 'none';\n let color = 'success';\n if (failed > 0) {\n color = 'critical';\n }\n else if (passed === 0 && failed === 0) {\n color = 'yellow';\n }\n const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully';\n const uri = encodeURIComponent(`tests-${message}-${color}`);\n return `![${hint}](https://img.shields.io/badge/${uri})`;\n}\nfunction getTestRunsReport(testRuns, options) {\n const sections = [];\n if (testRuns.length > 1 || options.onlySummary) {\n const tableData = testRuns.map((tr, runIndex) => {\n const time = markdown_utils_1.formatTime(tr.time);\n const name = tr.path;\n const addr = options.baseUrl + makeRunSlug(runIndex).link;\n const nameLink = markdown_utils_1.link(name, addr);\n const passed = tr.passed > 0 ? `${tr.passed}${markdown_utils_1.Icon.success}` : '';\n const failed = tr.failed > 0 ? `${tr.failed}${markdown_utils_1.Icon.fail}` : '';\n const skipped = tr.skipped > 0 ? `${tr.skipped}${markdown_utils_1.Icon.skip}` : '';\n return [nameLink, passed, failed, skipped, time];\n });\n const resultsTable = markdown_utils_1.table(['Report', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...tableData);\n sections.push(resultsTable);\n }\n if (options.onlySummary === false) {\n const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();\n sections.push(...suitesReports);\n }\n return sections;\n}\nfunction getSuitesReport(tr, runIndex, options) {\n const sections = [];\n const trSlug = makeRunSlug(runIndex);\n const nameLink = `<a id=\"${trSlug.id}\" href=\"${options.baseUrl + trSlug.link}\">${tr.path}</a>`;\n const icon = getResultIcon(tr.result);\n sections.push(`## ${icon}\\xa0${nameLink}`);\n const time = markdown_utils_1.formatTime(tr.time);\n const headingLine2 = tr.tests > 0\n ? `**${tr.tests}** tests were completed in **${time}** with **${tr.passed}** passed, **${tr.failed}** failed and **${tr.skipped}** skipped.`\n : 'No tests found';\n sections.push(headingLine2);\n const suites = options.listSuites === 'failed' ? 
tr.failedSuites : tr.suites;\n if (suites.length > 0) {\n const suitesTable = markdown_utils_1.table(['Test suite', 'Passed', 'Failed', 'Skipped', 'Time'], [markdown_utils_1.Align.Left, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right, markdown_utils_1.Align.Right], ...suites.map((s, suiteIndex) => {\n const tsTime = markdown_utils_1.formatTime(s.time);\n const tsName = s.name;\n const skipLink = options.listTests === 'none' || (options.listTests === 'failed' && s.result !== 'failed');\n const tsAddr = options.baseUrl + makeSuiteSlug(runIndex, suiteIndex).link;\n const tsNameLink = skipLink ? tsName : markdown_utils_1.link(tsName, tsAddr);\n const passed = s.passed > 0 ? `${s.passed}${markdown_utils_1.Icon.success}` : '';\n const failed = s.failed > 0 ? `${s.failed}${markdown_utils_1.Icon.fail}` : '';\n const skipped = s.skipped > 0 ? `${s.skipped}${markdown_utils_1.Icon.skip}` : '';\n return [tsNameLink, passed, failed, skipped, tsTime];\n }));\n sections.push(suitesTable);\n }\n if (options.listTests !== 'none') {\n const tests = suites.map((ts, suiteIndex) => getTestsReport(ts, runIndex, suiteIndex, options)).flat();\n if (tests.length > 1) {\n sections.push(...tests);\n }\n }\n return sections;\n}\nfunction getTestsReport(ts, runIndex, suiteIndex, options) {\n var _a, _b, _c;\n if (options.listTests === 'failed' && ts.result !== 'failed') {\n return [];\n }\n const groups = ts.groups;\n if (groups.length === 0) {\n return [];\n }\n const sections = [];\n const tsName = ts.name;\n const tsSlug = makeSuiteSlug(runIndex, suiteIndex);\n const tsNameLink = `<a id=\"${tsSlug.id}\" href=\"${options.baseUrl + tsSlug.link}\">${tsName}</a>`;\n const icon = getResultIcon(ts.result);\n sections.push(`### ${icon}\\xa0${tsNameLink}`);\n sections.push('```');\n for (const grp of groups) {\n if (grp.name) {\n sections.push(grp.name);\n }\n const space = grp.name ? ' ' : '';\n for (const tc of grp.tests) {\n const result = getResultIcon(tc.result);\n sections.push(`${space}${result} ${tc.name}`);\n if (tc.error) {\n const lines = (_c = ((_a = tc.error.message) !== null && _a !== void 0 ? _a : (_b = parse_utils_1.getFirstNonEmptyLine(tc.error.details)) === null || _b === void 0 ? void 0 : _b.trim())) === null || _c === void 0 ? 
void 0 : _c.split(/\\r?\\n/g).map(l => '\\t' + l);\n if (lines) {\n sections.push(...lines);\n }\n }\n }\n }\n sections.push('```');\n return sections;\n}\nfunction makeRunSlug(runIndex) {\n // use prefix to avoid slug conflicts after escaping the paths\n return slugger_1.slug(`r${runIndex}`);\n}\nfunction makeSuiteSlug(runIndex, suiteIndex) {\n // use prefix to avoid slug conflicts after escaping the paths\n return slugger_1.slug(`r${runIndex}s${suiteIndex}`);\n}\nfunction getResultIcon(result) {\n switch (result) {\n case 'success':\n return markdown_utils_1.Icon.success;\n case 'skipped':\n return markdown_utils_1.Icon.skip;\n case 'failed':\n return markdown_utils_1.Icon.fail;\n default:\n return '';\n }\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TestCaseResult = exports.TestGroupResult = exports.TestSuiteResult = exports.TestRunResult = void 0;\nclass TestRunResult {\n constructor(path, suites, totalTime) {\n this.path = path;\n this.suites = suites;\n this.totalTime = totalTime;\n }\n get tests() {\n return this.suites.reduce((sum, g) => sum + g.tests, 0);\n }\n get passed() {\n return this.suites.reduce((sum, g) => sum + g.passed, 0);\n }\n get failed() {\n return this.suites.reduce((sum, g) => sum + g.failed, 0);\n }\n get skipped() {\n return this.suites.reduce((sum, g) => sum + g.skipped, 0);\n }\n get time() {\n var _a;\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.suites.reduce((sum, g) => sum + g.time, 0);\n }\n get result() {\n return this.suites.some(t => t.result === 'failed') ? 'failed' : 'success';\n }\n get failedSuites() {\n return this.suites.filter(s => s.result === 'failed');\n }\n sort(deep) {\n this.suites.sort((a, b) => a.name.localeCompare(b.name));\n if (deep) {\n for (const suite of this.suites) {\n suite.sort(deep);\n }\n }\n }\n}\nexports.TestRunResult = TestRunResult;\nclass TestSuiteResult {\n constructor(name, groups, totalTime) {\n this.name = name;\n this.groups = groups;\n this.totalTime = totalTime;\n }\n get tests() {\n return this.groups.reduce((sum, g) => sum + g.tests.length, 0);\n }\n get passed() {\n return this.groups.reduce((sum, g) => sum + g.passed, 0);\n }\n get failed() {\n return this.groups.reduce((sum, g) => sum + g.failed, 0);\n }\n get skipped() {\n return this.groups.reduce((sum, g) => sum + g.skipped, 0);\n }\n get time() {\n var _a;\n return (_a = this.totalTime) !== null && _a !== void 0 ? _a : this.groups.reduce((sum, g) => sum + g.time, 0);\n }\n get result() {\n return this.groups.some(t => t.result === 'failed') ? 'failed' : 'success';\n }\n get failedGroups() {\n return this.groups.filter(grp => grp.result === 'failed');\n }\n sort(deep) {\n this.groups.sort((a, b) => { var _a, _b; return ((_a = a.name) !== null && _a !== void 0 ? _a : '').localeCompare((_b = b.name) !== null && _b !== void 0 ? _b : ''); });\n if (deep) {\n for (const grp of this.groups) {\n grp.sort();\n }\n }\n }\n}\nexports.TestSuiteResult = TestSuiteResult;\nclass TestGroupResult {\n constructor(name, tests) {\n this.name = name;\n this.tests = tests;\n }\n get passed() {\n return this.tests.reduce((sum, t) => (t.result === 'success' ? sum + 1 : sum), 0);\n }\n get failed() {\n return this.tests.reduce((sum, t) => (t.result === 'failed' ? sum + 1 : sum), 0);\n }\n get skipped() {\n return this.tests.reduce((sum, t) => (t.result === 'skipped' ? 
sum + 1 : sum), 0);\n }\n get time() {\n return this.tests.reduce((sum, t) => sum + t.time, 0);\n }\n get result() {\n return this.tests.some(t => t.result === 'failed') ? 'failed' : 'success';\n }\n get failedTests() {\n return this.tests.filter(tc => tc.result === 'failed');\n }\n sort() {\n this.tests.sort((a, b) => a.name.localeCompare(b.name));\n }\n}\nexports.TestGroupResult = TestGroupResult;\nclass TestCaseResult {\n constructor(name, result, time, error) {\n this.name = name;\n this.result = result;\n this.time = time;\n this.error = error;\n }\n}\nexports.TestCaseResult = TestCaseResult;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst exec_1 = require(\"@actions/exec\");\n// Wraps original exec() function\n// Returns exit code and whole stdout/stderr\nasync function exec(commandLine, args, options) {\n options = options || {};\n let stdout = '';\n let stderr = '';\n options.listeners = {\n stdout: (data) => (stdout += data.toString()),\n stderr: (data) => (stderr += data.toString())\n };\n const code = await exec_1.exec(commandLine, args, options);\n return { code, stdout, stderr };\n}\nexports.default = exec;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.listFiles = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst exec_1 = __importDefault(require(\"./exec\"));\nasync function listFiles() {\n core.startGroup('Listing all files tracked by git');\n let output = '';\n try {\n output = (await exec_1.default('git', ['ls-files', '-z'])).stdout;\n }\n finally {\n fixStdOutNullTermination();\n core.endGroup();\n }\n return output.split('\\u0000').filter(s => s.length > 0);\n}\nexports.listFiles = listFiles;\nfunction fixStdOutNullTermination() {\n // Previous command uses NULL as delimiters and output is printed to stdout.\n // We have to make sure next thing written to stdout will start on new line.\n // Otherwise things like ::set-output wouldn't work.\n core.info('');\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.listFiles = exports.downloadArtifact = exports.getCheckRunContext = void 0;\nconst fs_1 = require(\"fs\");\nconst core = __importStar(require(\"@actions/core\"));\nconst github = __importStar(require(\"@actions/github\"));\nconst stream = __importStar(require(\"stream\"));\nconst util_1 = require(\"util\");\nconst got_1 = __importDefault(require(\"got\"));\nconst asyncStream = util_1.promisify(stream.pipeline);\nfunction getCheckRunContext() {\n if (github.context.eventName === 'workflow_run') {\n core.info('Action was triggered by workflow_run: using SHA and RUN_ID from triggering workflow');\n const event = github.context.payload;\n if (!event.workflow_run) {\n throw new Error(\"Event of type 'workflow_run' is missing 'workflow_run' field\");\n }\n return {\n sha: event.workflow_run.head_commit.id,\n runId: event.workflow_run.id\n };\n }\n const runId = github.context.runId;\n if (github.context.payload.pull_request) {\n core.info(`Action was triggered by ${github.context.eventName}: using SHA from head of source branch`);\n const pr = github.context.payload.pull_request;\n return { sha: pr.head.sha, runId };\n }\n return { sha: github.context.sha, runId };\n}\nexports.getCheckRunContext = getCheckRunContext;\nasync function downloadArtifact(octokit, artifactId, fileName, token) {\n core.startGroup(`Downloading artifact ${fileName}`);\n try {\n core.info(`Artifact ID: ${artifactId}`);\n const req = octokit.actions.downloadArtifact.endpoint({\n ...github.context.repo,\n artifact_id: artifactId,\n archive_format: 'zip'\n });\n const headers = {\n Authorization: `Bearer ${token}`\n };\n const resp = await got_1.default(req.url, {\n headers,\n followRedirect: false\n });\n core.info(`Fetch artifact URL: ${resp.statusCode} ${resp.statusMessage}`);\n if (resp.statusCode !== 302) {\n throw new Error('Fetch artifact URL failed: received unexpected status code');\n }\n const url = resp.headers.location;\n if (url === undefined) {\n const receivedHeaders = Object.keys(resp.headers);\n core.info(`Received headers: ${receivedHeaders.join(', ')}`);\n throw new Error('Location header was not found in API response');\n }\n if (typeof url !== 'string') {\n throw new Error(`Location header has unexpected value: ${url}`);\n }\n const downloadStream = got_1.default.stream(url, { headers });\n const fileWriterStream = fs_1.createWriteStream(fileName);\n core.info(`Downloading ${url}`);\n downloadStream.on('downloadProgress', ({ transferred }) => {\n core.info(`Progress: ${transferred} B`);\n });\n await asyncStream(downloadStream, fileWriterStream);\n }\n finally {\n core.endGroup();\n }\n}\nexports.downloadArtifact = downloadArtifact;\nasync function listFiles(octokit, sha) {\n core.startGroup('Fetching list of tracked files from GitHub');\n try {\n const commit = await octokit.git.getCommit({\n commit_sha: sha,\n 
...github.context.repo\n });\n const files = await listGitTree(octokit, commit.data.tree.sha, '');\n return files;\n }\n finally {\n core.endGroup();\n }\n}\nexports.listFiles = listFiles;\nasync function listGitTree(octokit, sha, path) {\n const pathLog = path ? ` at ${path}` : '';\n core.info(`Fetching tree ${sha}${pathLog}`);\n let truncated = false;\n let tree = await octokit.git.getTree({\n recursive: 'true',\n tree_sha: sha,\n ...github.context.repo\n });\n if (tree.data.truncated) {\n truncated = true;\n tree = await octokit.git.getTree({\n tree_sha: sha,\n ...github.context.repo\n });\n }\n const result = [];\n for (const tr of tree.data.tree) {\n const file = `${path}${tr.path}`;\n if (tr.type === 'blob') {\n result.push(file);\n }\n else if (tr.type === 'tree' && truncated) {\n const files = await listGitTree(octokit, tr.sha, `${file}/`);\n result.push(...files);\n }\n }\n return result;\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.formatTime = exports.ellipsis = exports.fixEol = exports.tableEscape = exports.table = exports.link = exports.Icon = exports.Align = void 0;\nvar Align;\n(function (Align) {\n Align[\"Left\"] = \":---\";\n Align[\"Center\"] = \":---:\";\n Align[\"Right\"] = \"---:\";\n Align[\"None\"] = \"---\";\n})(Align = exports.Align || (exports.Align = {}));\nexports.Icon = {\n skip: '✖️',\n success: '✔️',\n fail: '❌' // ':x:'\n};\nfunction link(title, address) {\n return `[${title}](${address})`;\n}\nexports.link = link;\nfunction table(headers, align, ...rows) {\n const headerRow = `|${headers.map(tableEscape).join('|')}|`;\n const alignRow = `|${align.join('|')}|`;\n const contentRows = rows.map(row => `|${row.map(tableEscape).join('|')}|`).join('\\n');\n return [headerRow, alignRow, contentRows].join('\\n');\n}\nexports.table = table;\nfunction tableEscape(content) {\n return content.toString().replace('|', '\\\\|');\n}\nexports.tableEscape = tableEscape;\nfunction fixEol(text) {\n var _a;\n return (_a = text === null || text === void 0 ? void 0 : text.replace(/\\r/g, '')) !== null && _a !== void 0 ? 
_a : '';\n}\nexports.fixEol = fixEol;\nfunction ellipsis(text, maxLength) {\n if (text.length <= maxLength) {\n return text;\n }\n return text.substr(0, maxLength - 3) + '...';\n}\nexports.ellipsis = ellipsis;\nfunction formatTime(ms) {\n if (ms > 1000) {\n return `${Math.round(ms / 1000)}s`;\n }\n return `${Math.round(ms)}ms`;\n}\nexports.formatTime = formatTime;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getExceptionSource = void 0;\nconst path_utils_1 = require(\"./path-utils\");\nfunction getExceptionSource(stackTrace, trackedFiles, getRelativePath) {\n const lines = stackTrace.split(/\\r?\\n/);\n const re = /\\((.*):(\\d+):\\d+\\)$/;\n for (const str of lines) {\n const match = str.match(re);\n if (match !== null) {\n const [_, fileStr, lineStr] = match;\n const filePath = path_utils_1.normalizeFilePath(fileStr);\n if (filePath.startsWith('internal/') || filePath.includes('/node_modules/')) {\n continue;\n }\n const path = getRelativePath(filePath);\n if (!path) {\n continue;\n }\n if (trackedFiles.includes(path)) {\n const line = parseInt(lineStr);\n return { path, line };\n }\n }\n }\n}\nexports.getExceptionSource = getExceptionSource;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getFirstNonEmptyLine = exports.parseIsoDate = exports.parseNetDuration = void 0;\nfunction parseNetDuration(str) {\n const durationRe = /^(\\d\\d):(\\d\\d):(\\d\\d(?:\\.\\d+)?)$/;\n const durationMatch = str.match(durationRe);\n if (durationMatch === null) {\n throw new Error(`Invalid format: \"${str}\" is not NET duration`);\n }\n const [_, hourStr, minStr, secStr] = durationMatch;\n return (parseInt(hourStr) * 3600 + parseInt(minStr) * 60 + parseFloat(secStr)) * 1000;\n}\nexports.parseNetDuration = parseNetDuration;\nfunction parseIsoDate(str) {\n const isoDateRe = /^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d:[0-5]\\d|Z)$/;\n if (str === undefined || !isoDateRe.test(str)) {\n throw new Error(`Invalid format: \"${str}\" is not ISO date`);\n }\n return new Date(str);\n}\nexports.parseIsoDate = parseIsoDate;\nfunction getFirstNonEmptyLine(stackTrace) {\n const lines = stackTrace.split(/\\r?\\n/g);\n return lines.find(str => !/^\\s*$/.test(str));\n}\nexports.getFirstNonEmptyLine = getFirstNonEmptyLine;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getBasePath = exports.normalizeFilePath = exports.normalizeDirPath = void 0;\nfunction normalizeDirPath(path, addTrailingSlash) {\n if (!path) {\n return path;\n }\n path = normalizeFilePath(path);\n if (addTrailingSlash && !path.endsWith('/')) {\n path += '/';\n }\n return path;\n}\nexports.normalizeDirPath = normalizeDirPath;\nfunction normalizeFilePath(path) {\n if (!path) {\n return path;\n }\n return path.trim().replace(/\\\\/g, '/');\n}\nexports.normalizeFilePath = normalizeFilePath;\nfunction getBasePath(path, trackedFiles) {\n if (trackedFiles.includes(path)) {\n return '';\n }\n let max = '';\n for (const file of trackedFiles) {\n if (path.endsWith(file) && file.length > max.length) {\n max = file;\n }\n }\n if (max === '') {\n return undefined;\n }\n const base = path.substr(0, path.length - max.length);\n return base;\n}\nexports.getBasePath = getBasePath;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.slug = void 0;\n// Returns HTML element id and href link usable as manual anchor links\n// This is needed because Github in check 
run summary doesn't automatically\n// create links out of headings as it normally does for other markdown content\nfunction slug(name) {\n const slugId = name\n .trim()\n .replace(/_/g, '')\n .replace(/[./\\\\]/g, '-')\n .replace(/[^\\w-]/g, '');\n const id = `user-content-${slugId}`;\n const link = `#${slugId}`;\n return { id, link };\n}\nexports.slug = slug;\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n */\nfunction error(message) {\n command_1.issue('error', message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n */\nfunction warning(message) {\n command_1.issue('warning', message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst tr = __importStar(require(\"./toolrunner\"));\n/**\n * Exec a command.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param commandLine command to execute (can include additional args). 
Must be correctly escaped.\n * @param args optional arguments for tool. Escaping is handled by the lib.\n * @param options optional exec options. See ExecOptions\n * @returns Promise exit code\n */\nfunction exec(commandLine, args, options) {\n return __awaiter(this, void 0, void 0, function* () {\n const commandArgs = tr.argStringToArray(commandLine);\n if (commandArgs.length === 0) {\n throw new Error(`Parameter 'commandLine' cannot be null or empty.`);\n }\n // Path to tool to execute should be first arg\n const toolPath = commandArgs[0];\n args = commandArgs.slice(1).concat(args || []);\n const runner = new tr.ToolRunner(toolPath, args, options);\n return runner.exec();\n });\n}\nexports.exec = exec;\n//# sourceMappingURL=exec.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\nconst events = __importStar(require(\"events\"));\nconst child = __importStar(require(\"child_process\"));\nconst path = __importStar(require(\"path\"));\nconst io = __importStar(require(\"@actions/io\"));\nconst ioUtil = __importStar(require(\"@actions/io/lib/io-util\"));\n/* eslint-disable @typescript-eslint/unbound-method */\nconst IS_WINDOWS = process.platform === 'win32';\n/*\n * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.\n */\nclass ToolRunner extends events.EventEmitter {\n constructor(toolPath, args, options) {\n super();\n if (!toolPath) {\n throw new Error(\"Parameter 'toolPath' cannot be null or empty.\");\n }\n this.toolPath = toolPath;\n this.args = args || [];\n this.options = options || {};\n }\n _debug(message) {\n if (this.options.listeners && this.options.listeners.debug) {\n this.options.listeners.debug(message);\n }\n }\n _getCommandString(options, noPrefix) {\n const toolPath = this._getSpawnFileName();\n const args = this._getSpawnArgs(options);\n let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool\n if (IS_WINDOWS) {\n // Windows + cmd file\n if (this._isCmdFile()) {\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows + verbatim\n else if (options.windowsVerbatimArguments) {\n cmd += `\"${toolPath}\"`;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n // Windows (regular)\n else {\n cmd += this._windowsQuoteCmdArg(toolPath);\n for (const a of args) {\n cmd += ` ${this._windowsQuoteCmdArg(a)}`;\n }\n }\n }\n else {\n // OSX/Linux - this can likely be improved with some form of quoting.\n // creating processes on Unix is fundamentally different than Windows.\n // on Unix, execvp() takes an arg array.\n cmd += toolPath;\n for (const a of args) {\n cmd += ` ${a}`;\n }\n }\n return cmd;\n }\n _processLineBuffer(data, strBuffer, onLine) {\n try {\n let s = strBuffer + data.toString();\n let n = s.indexOf(os.EOL);\n while (n > -1) {\n const line = s.substring(0, n);\n onLine(line);\n // the rest of the string ...\n s = s.substring(n + os.EOL.length);\n n = s.indexOf(os.EOL);\n }\n strBuffer = s;\n }\n catch (err) {\n // streaming lines to console is best effort. Don't fail a build.\n this._debug(`error processing line. Failed with error ${err}`);\n }\n }\n _getSpawnFileName() {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n return process.env['COMSPEC'] || 'cmd.exe';\n }\n }\n return this.toolPath;\n }\n _getSpawnArgs(options) {\n if (IS_WINDOWS) {\n if (this._isCmdFile()) {\n let argline = `/D /S /C \"${this._windowsQuoteCmdArg(this.toolPath)}`;\n for (const a of this.args) {\n argline += ' ';\n argline += options.windowsVerbatimArguments\n ? a\n : this._windowsQuoteCmdArg(a);\n }\n argline += '\"';\n return [argline];\n }\n }\n return this.args;\n }\n _endsWith(str, end) {\n return str.endsWith(end);\n }\n _isCmdFile() {\n const upperToolPath = this.toolPath.toUpperCase();\n return (this._endsWith(upperToolPath, '.CMD') ||\n this._endsWith(upperToolPath, '.BAT'));\n }\n _windowsQuoteCmdArg(arg) {\n // for .exe, apply the normal quoting rules that libuv applies\n if (!this._isCmdFile()) {\n return this._uvQuoteCmdArg(arg);\n }\n // otherwise apply quoting rules specific to the cmd.exe command line parser.\n // the libuv rules are generic and are not designed specifically for cmd.exe\n // command line parser.\n //\n // for a detailed description of the cmd.exe command line parser, refer to\n // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912\n // need quotes for empty arg\n if (!arg) {\n return '\"\"';\n }\n // determine whether the arg needs to be quoted\n const cmdSpecialChars = [\n ' ',\n '\\t',\n '&',\n '(',\n ')',\n '[',\n ']',\n '{',\n '}',\n '^',\n '=',\n ';',\n '!',\n \"'\",\n '+',\n ',',\n '`',\n '~',\n '|',\n '<',\n '>',\n '\"'\n ];\n let needsQuotes = false;\n for (const char of arg) {\n if (cmdSpecialChars.some(x => x === char)) {\n needsQuotes = true;\n break;\n }\n }\n // short-circuit if quotes not needed\n if (!needsQuotes) {\n return arg;\n }\n // the following quoting rules are very similar to the rules that by libuv applies.\n //\n // 1) wrap the string in quotes\n //\n // 2) double-up quotes - i.e. \" => \"\"\n //\n // this is different from the libuv quoting rules. 
libuv replaces \" with \\\", which unfortunately\n // doesn't work well with a cmd.exe command line.\n //\n // note, replacing \" with \"\" also works well if the arg is passed to a downstream .NET console app.\n // for example, the command line:\n // foo.exe \"myarg:\"\"my val\"\"\"\n // is parsed by a .NET console app into an arg array:\n // [ \"myarg:\\\"my val\\\"\" ]\n // which is the same end result when applying libuv quoting rules. although the actual\n // command line from libuv quoting rules would look like:\n // foo.exe \"myarg:\\\"my val\\\"\"\n //\n // 3) double-up slashes that precede a quote,\n // e.g. hello \\world => \"hello \\world\"\n // hello\\\"world => \"hello\\\\\"\"world\"\n // hello\\\\\"world => \"hello\\\\\\\\\"\"world\"\n // hello world\\ => \"hello world\\\\\"\n //\n // technically this is not required for a cmd.exe command line, or the batch argument parser.\n // the reasons for including this as a .cmd quoting rule are:\n //\n // a) this is optimized for the scenario where the argument is passed from the .cmd file to an\n // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.\n //\n // b) it's what we've been doing previously (by deferring to node default behavior) and we\n // haven't heard any complaints about that aspect.\n //\n // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be\n // escaped when used on the command line directly - even though within a .cmd file % can be escaped\n // by using %%.\n //\n // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts\n // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.\n //\n // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would\n // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the\n // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args\n // to an external program.\n //\n // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.\n // % can be escaped within a .cmd file.\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\'; // double the slash\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\"'; // double the quote\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _uvQuoteCmdArg(arg) {\n // Tool runner wraps child_process.spawn() and needs to apply the same quoting as\n // Node in certain cases where the undocumented spawn option windowsVerbatimArguments\n // is used.\n //\n // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,\n // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),\n // pasting copyright notice from Node within this function:\n //\n // Copyright Joyent, Inc. and other Node contributors. 
All rights reserved.\n //\n // Permission is hereby granted, free of charge, to any person obtaining a copy\n // of this software and associated documentation files (the \"Software\"), to\n // deal in the Software without restriction, including without limitation the\n // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n // sell copies of the Software, and to permit persons to whom the Software is\n // furnished to do so, subject to the following conditions:\n //\n // The above copyright notice and this permission notice shall be included in\n // all copies or substantial portions of the Software.\n //\n // THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n // IN THE SOFTWARE.\n if (!arg) {\n // Need double quotation for empty argument\n return '\"\"';\n }\n if (!arg.includes(' ') && !arg.includes('\\t') && !arg.includes('\"')) {\n // No quotation needed\n return arg;\n }\n if (!arg.includes('\"') && !arg.includes('\\\\')) {\n // No embedded double quotes or backslashes, so I can just wrap\n // quote marks around the whole thing.\n return `\"${arg}\"`;\n }\n // Expected input/output:\n // input : hello\"world\n // output: \"hello\\\"world\"\n // input : hello\"\"world\n // output: \"hello\\\"\\\"world\"\n // input : hello\\world\n // output: hello\\world\n // input : hello\\\\world\n // output: hello\\\\world\n // input : hello\\\"world\n // output: \"hello\\\\\\\"world\"\n // input : hello\\\\\"world\n // output: \"hello\\\\\\\\\\\"world\"\n // input : hello world\\\n // output: \"hello world\\\\\" - note the comment in libuv actually reads \"hello world\\\"\n // but it appears the comment is wrong, it should be \"hello world\\\\\"\n let reverse = '\"';\n let quoteHit = true;\n for (let i = arg.length; i > 0; i--) {\n // walk the string in reverse\n reverse += arg[i - 1];\n if (quoteHit && arg[i - 1] === '\\\\') {\n reverse += '\\\\';\n }\n else if (arg[i - 1] === '\"') {\n quoteHit = true;\n reverse += '\\\\';\n }\n else {\n quoteHit = false;\n }\n }\n reverse += '\"';\n return reverse\n .split('')\n .reverse()\n .join('');\n }\n _cloneExecOptions(options) {\n options = options || {};\n const result = {\n cwd: options.cwd || process.cwd(),\n env: options.env || process.env,\n silent: options.silent || false,\n windowsVerbatimArguments: options.windowsVerbatimArguments || false,\n failOnStdErr: options.failOnStdErr || false,\n ignoreReturnCode: options.ignoreReturnCode || false,\n delay: options.delay || 10000\n };\n result.outStream = options.outStream || process.stdout;\n result.errStream = options.errStream || process.stderr;\n return result;\n }\n _getSpawnOptions(options, toolPath) {\n options = options || {};\n const result = {};\n result.cwd = options.cwd;\n result.env = options.env;\n result['windowsVerbatimArguments'] =\n options.windowsVerbatimArguments || this._isCmdFile();\n if (options.windowsVerbatimArguments) {\n result.argv0 = `\"${toolPath}\"`;\n }\n return result;\n }\n /**\n * Exec a tool.\n * Output will be streamed to the live console.\n * Returns promise with return code\n *\n * @param tool path to tool to exec\n * @param 
options optional exec options. See ExecOptions\n * @returns number\n */\n exec() {\n return __awaiter(this, void 0, void 0, function* () {\n // root the tool path if it is unrooted and contains relative pathing\n if (!ioUtil.isRooted(this.toolPath) &&\n (this.toolPath.includes('/') ||\n (IS_WINDOWS && this.toolPath.includes('\\\\')))) {\n // prefer options.cwd if it is specified, however options.cwd may also need to be rooted\n this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);\n }\n // if the tool is only a file name, then resolve it from the PATH\n // otherwise verify it exists (add extension on Windows if necessary)\n this.toolPath = yield io.which(this.toolPath, true);\n return new Promise((resolve, reject) => {\n this._debug(`exec tool: ${this.toolPath}`);\n this._debug('arguments:');\n for (const arg of this.args) {\n this._debug(` ${arg}`);\n }\n const optionsNonNull = this._cloneExecOptions(this.options);\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);\n }\n const state = new ExecState(optionsNonNull, this.toolPath);\n state.on('debug', (message) => {\n this._debug(message);\n });\n const fileName = this._getSpawnFileName();\n const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));\n const stdbuffer = '';\n if (cp.stdout) {\n cp.stdout.on('data', (data) => {\n if (this.options.listeners && this.options.listeners.stdout) {\n this.options.listeners.stdout(data);\n }\n if (!optionsNonNull.silent && optionsNonNull.outStream) {\n optionsNonNull.outStream.write(data);\n }\n this._processLineBuffer(data, stdbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.stdline) {\n this.options.listeners.stdline(line);\n }\n });\n });\n }\n const errbuffer = '';\n if (cp.stderr) {\n cp.stderr.on('data', (data) => {\n state.processStderr = true;\n if (this.options.listeners && this.options.listeners.stderr) {\n this.options.listeners.stderr(data);\n }\n if (!optionsNonNull.silent &&\n optionsNonNull.errStream &&\n optionsNonNull.outStream) {\n const s = optionsNonNull.failOnStdErr\n ? 
optionsNonNull.errStream\n : optionsNonNull.outStream;\n s.write(data);\n }\n this._processLineBuffer(data, errbuffer, (line) => {\n if (this.options.listeners && this.options.listeners.errline) {\n this.options.listeners.errline(line);\n }\n });\n });\n }\n cp.on('error', (err) => {\n state.processError = err.message;\n state.processExited = true;\n state.processClosed = true;\n state.CheckComplete();\n });\n cp.on('exit', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n cp.on('close', (code) => {\n state.processExitCode = code;\n state.processExited = true;\n state.processClosed = true;\n this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);\n state.CheckComplete();\n });\n state.on('done', (error, exitCode) => {\n if (stdbuffer.length > 0) {\n this.emit('stdline', stdbuffer);\n }\n if (errbuffer.length > 0) {\n this.emit('errline', errbuffer);\n }\n cp.removeAllListeners();\n if (error) {\n reject(error);\n }\n else {\n resolve(exitCode);\n }\n });\n if (this.options.input) {\n if (!cp.stdin) {\n throw new Error('child process missing stdin');\n }\n cp.stdin.end(this.options.input);\n }\n });\n });\n }\n}\nexports.ToolRunner = ToolRunner;\n/**\n * Convert an arg string to an array of args. Handles escaping\n *\n * @param argString string of arguments\n * @returns string[] array of arguments\n */\nfunction argStringToArray(argString) {\n const args = [];\n let inQuotes = false;\n let escaped = false;\n let arg = '';\n function append(c) {\n // we only escape double quotes.\n if (escaped && c !== '\"') {\n arg += '\\\\';\n }\n arg += c;\n escaped = false;\n }\n for (let i = 0; i < argString.length; i++) {\n const c = argString.charAt(i);\n if (c === '\"') {\n if (!escaped) {\n inQuotes = !inQuotes;\n }\n else {\n append(c);\n }\n continue;\n }\n if (c === '\\\\' && escaped) {\n append(c);\n continue;\n }\n if (c === '\\\\' && inQuotes) {\n escaped = true;\n continue;\n }\n if (c === ' ' && !inQuotes) {\n if (arg.length > 0) {\n args.push(arg);\n arg = '';\n }\n continue;\n }\n append(c);\n }\n if (arg.length > 0) {\n args.push(arg.trim());\n }\n return args;\n}\nexports.argStringToArray = argStringToArray;\nclass ExecState extends events.EventEmitter {\n constructor(options, toolPath) {\n super();\n this.processClosed = false; // tracks whether the process has exited and stdio is closed\n this.processError = '';\n this.processExitCode = 0;\n this.processExited = false; // tracks whether the process has exited\n this.processStderr = false; // tracks whether stderr was written to\n this.delay = 10000; // 10 seconds\n this.done = false;\n this.timeout = null;\n if (!toolPath) {\n throw new Error('toolPath must not be empty');\n }\n this.options = options;\n this.toolPath = toolPath;\n if (options.delay) {\n this.delay = options.delay;\n }\n }\n CheckComplete() {\n if (this.done) {\n return;\n }\n if (this.processClosed) {\n this._setResult();\n }\n else if (this.processExited) {\n this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);\n }\n }\n _debug(message) {\n this.emit('debug', message);\n }\n _setResult() {\n // determine whether there is an error\n let error;\n if (this.processExited) {\n if (this.processError) {\n error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`);\n }\n else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {\n error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);\n }\n else if (this.processStderr && this.options.failOnStdErr) {\n error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);\n }\n }\n // clear the timeout\n if (this.timeout) {\n clearTimeout(this.timeout);\n this.timeout = null;\n }\n this.done = true;\n this.emit('done', error, this.processExitCode);\n }\n static HandleTimeout(state) {\n if (state.done) {\n return;\n }\n if (!state.processClosed && state.processExited) {\n const message = `The STDIO streams did not close within ${state.delay /\n 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;\n state._debug(message);\n }\n state._setResult();\n }\n}\n//# sourceMappingURL=toolrunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n 
HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if 
(requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = 
this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // 
If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n 
if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst assert_1 = require(\"assert\");\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;\nexports.IS_WINDOWS = process.platform === 'win32';\nfunction exists(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n yield exports.stat(fsPath);\n }\n catch (err) {\n if (err.code === 'ENOENT') {\n return false;\n }\n throw err;\n }\n return true;\n });\n}\nexports.exists = exists;\nfunction isDirectory(fsPath, useStat = false) {\n return __awaiter(this, void 0, void 0, function* () {\n const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);\n return stats.isDirectory();\n });\n}\nexports.isDirectory = isDirectory;\n/**\n * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:\n * \\, \\hello, \\\\hello\\share, C:, and C:\\hello (and corresponding alternate separator cases).\n */\nfunction isRooted(p) {\n p = normalizeSeparators(p);\n if (!p) {\n throw new Error('isRooted() parameter \"p\" cannot be empty');\n }\n if (exports.IS_WINDOWS) {\n return (p.startsWith('\\\\') || /^[A-Z]:/i.test(p) // e.g. \\ or \\hello or \\\\hello\n ); // e.g. 
C: or C:\\hello\n }\n return p.startsWith('/');\n}\nexports.isRooted = isRooted;\n/**\n * Recursively create a directory at `fsPath`.\n *\n * This implementation is optimistic, meaning it attempts to create the full\n * path first, and backs up the path stack from there.\n *\n * @param fsPath The path to create\n * @param maxDepth The maximum recursion depth\n * @param depth The current recursion depth\n */\nfunction mkdirP(fsPath, maxDepth = 1000, depth = 1) {\n return __awaiter(this, void 0, void 0, function* () {\n assert_1.ok(fsPath, 'a path argument must be provided');\n fsPath = path.resolve(fsPath);\n if (depth >= maxDepth)\n return exports.mkdir(fsPath);\n try {\n yield exports.mkdir(fsPath);\n return;\n }\n catch (err) {\n switch (err.code) {\n case 'ENOENT': {\n yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);\n yield exports.mkdir(fsPath);\n return;\n }\n default: {\n let stats;\n try {\n stats = yield exports.stat(fsPath);\n }\n catch (err2) {\n throw err;\n }\n if (!stats.isDirectory())\n throw err;\n }\n }\n }\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Best effort attempt to determine whether a file exists and is executable.\n * @param filePath file path to check\n * @param extensions additional file extensions to try\n * @return if file exists and is executable, returns the file path. otherwise empty string.\n */\nfunction tryGetExecutablePath(filePath, extensions) {\n return __awaiter(this, void 0, void 0, function* () {\n let stats = undefined;\n try {\n // test file exists\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // on Windows, test for valid extension\n const upperExt = path.extname(filePath).toUpperCase();\n if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {\n return filePath;\n }\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n // try each extension\n const originalFilePath = filePath;\n for (const extension of extensions) {\n filePath = originalFilePath + extension;\n stats = undefined;\n try {\n stats = yield exports.stat(filePath);\n }\n catch (err) {\n if (err.code !== 'ENOENT') {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);\n }\n }\n if (stats && stats.isFile()) {\n if (exports.IS_WINDOWS) {\n // preserve the case of the actual file (since an extension was appended)\n try {\n const directory = path.dirname(filePath);\n const upperName = path.basename(filePath).toUpperCase();\n for (const actualName of yield exports.readdir(directory)) {\n if (upperName === actualName.toUpperCase()) {\n filePath = path.join(directory, actualName);\n break;\n }\n }\n }\n catch (err) {\n // eslint-disable-next-line no-console\n console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);\n }\n return filePath;\n }\n else {\n if (isUnixExecutable(stats)) {\n return filePath;\n }\n }\n }\n }\n return '';\n });\n}\nexports.tryGetExecutablePath = tryGetExecutablePath;\nfunction normalizeSeparators(p) {\n p = p || '';\n if (exports.IS_WINDOWS) {\n // convert slashes on Windows\n p = p.replace(/\\//g, '\\\\');\n // remove redundant slashes\n return p.replace(/\\\\\\\\+/g, '\\\\');\n }\n // remove redundant slashes\n 
return p.replace(/\\/\\/+/g, '/');\n}\n// on Mac/Linux, test the execute bit\n// R W X R W X R W X\n// 256 128 64 32 16 8 4 2 1\nfunction isUnixExecutable(stats) {\n return ((stats.mode & 1) > 0 ||\n ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||\n ((stats.mode & 64) > 0 && stats.uid === process.getuid()));\n}\n//# sourceMappingURL=io-util.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst childProcess = require(\"child_process\");\nconst path = require(\"path\");\nconst util_1 = require(\"util\");\nconst ioUtil = require(\"./io-util\");\nconst exec = util_1.promisify(childProcess.exec);\n/**\n * Copies a file or folder.\n * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. See CopyOptions.\n */\nfunction cp(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const { force, recursive } = readCopyOptions(options);\n const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;\n // Dest is an existing file, but not forcing\n if (destStat && destStat.isFile() && !force) {\n return;\n }\n // If dest is an existing directory, should copy inside.\n const newDest = destStat && destStat.isDirectory()\n ? path.join(dest, path.basename(source))\n : dest;\n if (!(yield ioUtil.exists(source))) {\n throw new Error(`no such file or directory: ${source}`);\n }\n const sourceStat = yield ioUtil.stat(source);\n if (sourceStat.isDirectory()) {\n if (!recursive) {\n throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);\n }\n else {\n yield cpDirRecursive(source, newDest, 0, force);\n }\n }\n else {\n if (path.relative(source, newDest) === '') {\n // a file cannot be copied to itself\n throw new Error(`'${newDest}' and '${source}' are the same file`);\n }\n yield copyFile(source, newDest, force);\n }\n });\n}\nexports.cp = cp;\n/**\n * Moves a path.\n *\n * @param source source path\n * @param dest destination path\n * @param options optional. 
See MoveOptions.\n */\nfunction mv(source, dest, options = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n if (yield ioUtil.exists(dest)) {\n let destExists = true;\n if (yield ioUtil.isDirectory(dest)) {\n // If dest is directory copy src into dest\n dest = path.join(dest, path.basename(source));\n destExists = yield ioUtil.exists(dest);\n }\n if (destExists) {\n if (options.force == null || options.force) {\n yield rmRF(dest);\n }\n else {\n throw new Error('Destination already exists');\n }\n }\n }\n yield mkdirP(path.dirname(dest));\n yield ioUtil.rename(source, dest);\n });\n}\nexports.mv = mv;\n/**\n * Remove a path recursively with force\n *\n * @param inputPath path to remove\n */\nfunction rmRF(inputPath) {\n return __awaiter(this, void 0, void 0, function* () {\n if (ioUtil.IS_WINDOWS) {\n // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another\n // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.\n try {\n if (yield ioUtil.isDirectory(inputPath, true)) {\n yield exec(`rd /s /q \"${inputPath}\"`);\n }\n else {\n yield exec(`del /f /a \"${inputPath}\"`);\n }\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n // Shelling out fails to remove a symlink folder with missing source, this unlink catches that\n try {\n yield ioUtil.unlink(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n }\n }\n else {\n let isDir = false;\n try {\n isDir = yield ioUtil.isDirectory(inputPath);\n }\n catch (err) {\n // if you try to delete a file that doesn't exist, desired result is achieved\n // other errors are valid\n if (err.code !== 'ENOENT')\n throw err;\n return;\n }\n if (isDir) {\n yield exec(`rm -rf \"${inputPath}\"`);\n }\n else {\n yield ioUtil.unlink(inputPath);\n }\n }\n });\n}\nexports.rmRF = rmRF;\n/**\n * Make a directory. Creates the full path with folders in between\n * Will throw if it fails\n *\n * @param fsPath path to create\n * @returns Promise\n */\nfunction mkdirP(fsPath) {\n return __awaiter(this, void 0, void 0, function* () {\n yield ioUtil.mkdirP(fsPath);\n });\n}\nexports.mkdirP = mkdirP;\n/**\n * Returns path of a tool had the tool actually been invoked. Resolves via paths.\n * If you check and the tool does not exist, it will throw.\n *\n * @param tool name of the tool\n * @param check whether to check if tool exists\n * @returns Promise path to tool\n */\nfunction which(tool, check) {\n return __awaiter(this, void 0, void 0, function* () {\n if (!tool) {\n throw new Error(\"parameter 'tool' is required\");\n }\n // recursive when check=true\n if (check) {\n const result = yield which(tool, false);\n if (!result) {\n if (ioUtil.IS_WINDOWS) {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);\n }\n else {\n throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`);\n }\n }\n }\n try {\n // build the list of extensions to try\n const extensions = [];\n if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {\n for (const extension of process.env.PATHEXT.split(path.delimiter)) {\n if (extension) {\n extensions.push(extension);\n }\n }\n }\n // if it's rooted, return it if exists. otherwise return empty.\n if (ioUtil.isRooted(tool)) {\n const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);\n if (filePath) {\n return filePath;\n }\n return '';\n }\n // if any path separators, return empty\n if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\\\'))) {\n return '';\n }\n // build the list of directories\n //\n // Note, technically \"where\" checks the current directory on Windows. From a toolkit perspective,\n // it feels like we should not do this. Checking the current directory seems like more of a use\n // case of a shell, and the which() function exposed by the toolkit should strive for consistency\n // across platforms.\n const directories = [];\n if (process.env.PATH) {\n for (const p of process.env.PATH.split(path.delimiter)) {\n if (p) {\n directories.push(p);\n }\n }\n }\n // return the first match\n for (const directory of directories) {\n const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);\n if (filePath) {\n return filePath;\n }\n }\n return '';\n }\n catch (err) {\n throw new Error(`which failed with message ${err.message}`);\n }\n });\n}\nexports.which = which;\nfunction readCopyOptions(options) {\n const force = options.force == null ? true : options.force;\n const recursive = Boolean(options.recursive);\n return { force, recursive };\n}\nfunction cpDirRecursive(sourceDir, destDir, currentDepth, force) {\n return __awaiter(this, void 0, void 0, function* () {\n // Ensure there is not a run away recursive copy\n if (currentDepth >= 255)\n return;\n currentDepth++;\n yield mkdirP(destDir);\n const files = yield ioUtil.readdir(sourceDir);\n for (const fileName of files) {\n const srcFile = `${sourceDir}/${fileName}`;\n const destFile = `${destDir}/${fileName}`;\n const srcFileStat = yield ioUtil.lstat(srcFile);\n if (srcFileStat.isDirectory()) {\n // Recurse\n yield cpDirRecursive(srcFile, destFile, currentDepth, force);\n }\n else {\n yield copyFile(srcFile, destFile, force);\n }\n }\n // Change the mode for the newly created directory\n yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);\n });\n}\n// Buffered file copy\nfunction copyFile(srcFile, destFile, force) {\n return __awaiter(this, void 0, void 0, function* () {\n if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {\n // unlink/re-link it\n try {\n yield ioUtil.lstat(destFile);\n yield ioUtil.unlink(destFile);\n }\n catch (e) {\n // Try to override file permission\n if (e.code === 'EPERM') {\n yield ioUtil.chmod(destFile, '0666');\n yield ioUtil.unlink(destFile);\n }\n // other errors = it doesn't exist, no work to do\n }\n // Copy over symlink\n const symlinkFull = yield ioUtil.readlink(srcFile);\n yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null);\n }\n else if (!(yield ioUtil.exists(destFile)) || force) {\n yield ioUtil.copyFile(srcFile, destFile);\n }\n });\n}\n//# sourceMappingURL=io.js.map","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;\r\nconst NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');\r\nconst MAJOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);\r\nconst MINOR_VERSION = parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);\r\nconst SUPPORTED_MAJOR_VERSION = 10;\r\nconst SUPPORTED_MINOR_VERSION = 10;\r\nconst IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;\r\nconst IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;\r\n/**\r\n * IS `true` for Node.js 10.10 and greater.\r\n */\r\nexports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.scandirSync = exports.scandir = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction scandir(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.scandir = scandir;\r\nfunction scandirSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.scandirSync = scandirSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst rpl = require(\"run-parallel\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings, callback) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings, callback);\r\n }\r\n return readdir(directory, settings, callback);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings, callback) {\r\n settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {\r\n if (readdirError !== null) {\r\n return 
callFailureCallback(callback, readdirError);\r\n }\r\n const entries = dirents.map((dirent) => ({\r\n dirent,\r\n name: dirent.name,\r\n path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n }));\r\n if (!settings.followSymbolicLinks) {\r\n return callSuccessCallback(callback, entries);\r\n }\r\n const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));\r\n rpl(tasks, (rplError, rplEntries) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n callSuccessCallback(callback, rplEntries);\r\n });\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction makeRplTaskEntry(entry, settings) {\r\n return (done) => {\r\n if (!entry.dirent.isSymbolicLink()) {\r\n return done(null, entry);\r\n }\r\n settings.fs.stat(entry.path, (statError, stats) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return done(statError);\r\n }\r\n return done(null, entry);\r\n }\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n return done(null, entry);\r\n });\r\n };\r\n}\r\nfunction readdir(directory, settings, callback) {\r\n settings.fs.readdir(directory, (readdirError, names) => {\r\n if (readdirError !== null) {\r\n return callFailureCallback(callback, readdirError);\r\n }\r\n const filepaths = names.map((name) => common.joinPathSegments(directory, name, settings.pathSegmentSeparator));\r\n const tasks = filepaths.map((filepath) => {\r\n return (done) => fsStat.stat(filepath, settings.fsStatSettings, done);\r\n });\r\n rpl(tasks, (rplError, results) => {\r\n if (rplError !== null) {\r\n return callFailureCallback(callback, rplError);\r\n }\r\n const entries = [];\r\n names.forEach((name, index) => {\r\n const stats = results[index];\r\n const entry = {\r\n name,\r\n path: filepaths[index],\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n entries.push(entry);\r\n });\r\n callSuccessCallback(callback, entries);\r\n });\r\n });\r\n}\r\nexports.readdir = readdir;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = void 0;\r\nfunction joinPathSegments(a, b, separator) {\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.readdir = exports.readdirWithFileTypes = exports.read = void 0;\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst constants_1 = require(\"../constants\");\r\nconst utils = require(\"../utils\");\r\nconst common = require(\"./common\");\r\nfunction read(directory, settings) {\r\n if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {\r\n return readdirWithFileTypes(directory, settings);\r\n }\r\n return readdir(directory, settings);\r\n}\r\nexports.read = read;\r\nfunction readdirWithFileTypes(directory, settings) {\r\n const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });\r\n return dirents.map((dirent) => {\r\n const entry = {\r\n dirent,\r\n name: dirent.name,\r\n path: 
common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)\r\n };\r\n if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {\r\n try {\r\n const stats = settings.fs.statSync(entry.path);\r\n entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);\r\n }\r\n catch (error) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n throw error;\r\n }\r\n }\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdirWithFileTypes = readdirWithFileTypes;\r\nfunction readdir(directory, settings) {\r\n const names = settings.fs.readdirSync(directory);\r\n return names.map((name) => {\r\n const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);\r\n const stats = fsStat.statSync(entryPath, settings.fsStatSettings);\r\n const entry = {\r\n name,\r\n path: entryPath,\r\n dirent: utils.fs.createDirentFromStats(name, stats)\r\n };\r\n if (settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n });\r\n}\r\nexports.readdir = readdir;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n this.fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this.followSymbolicLinks,\r\n fs: this.fs,\r\n throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? 
option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.fs = void 0;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nexports.FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n stat: fs.stat,\r\n lstatSync: fs.lstatSync,\r\n statSync: fs.statSync\r\n};\r\nfunction createFileSystemAdapter(fsMethods) {\r\n if (fsMethods === undefined) {\r\n return exports.FILE_SYSTEM_ADAPTER;\r\n }\r\n return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);\r\n}\r\nexports.createFileSystemAdapter = createFileSystemAdapter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.statSync = exports.stat = exports.Settings = void 0;\r\nconst async = require(\"./providers/async\");\r\nconst sync = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction stat(path, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return async.read(path, getSettings(), optionsOrSettingsOrCallback);\r\n }\r\n async.read(path, getSettings(optionsOrSettingsOrCallback), callback);\r\n}\r\nexports.stat = stat;\r\nfunction statSync(path, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n return sync.read(path, settings);\r\n}\r\nexports.statSync = statSync;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings, callback) {\r\n settings.fs.lstat(path, (lstatError, lstat) => {\r\n if (lstatError !== null) {\r\n return callFailureCallback(callback, lstatError);\r\n }\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n settings.fs.stat(path, (statError, stat) => {\r\n if (statError !== null) {\r\n if (settings.throwErrorOnBrokenSymbolicLink) {\r\n return callFailureCallback(callback, statError);\r\n }\r\n return callSuccessCallback(callback, lstat);\r\n }\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n callSuccessCallback(callback, stat);\r\n });\r\n });\r\n}\r\nexports.read = read;\r\nfunction callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction 
callSuccessCallback(callback, result) {\r\n callback(null, result);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.read = void 0;\r\nfunction read(path, settings) {\r\n const lstat = settings.fs.lstatSync(path);\r\n if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {\r\n return lstat;\r\n }\r\n try {\r\n const stat = settings.fs.statSync(path);\r\n if (settings.markSymbolicLink) {\r\n stat.isSymbolicLink = () => true;\r\n }\r\n return stat;\r\n }\r\n catch (error) {\r\n if (!settings.throwErrorOnBrokenSymbolicLink) {\r\n return lstat;\r\n }\r\n throw error;\r\n }\r\n}\r\nexports.read = read;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fs = require(\"./adapters/fs\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);\r\n this.fs = fs.createFileSystemAdapter(this._options.fs);\r\n this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nexports.Settings = settings_1.default;\r\nfunction walk(directory, optionsOrSettingsOrCallback, callback) {\r\n if (typeof optionsOrSettingsOrCallback === 'function') {\r\n return new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);\r\n }\r\n new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);\r\n}\r\nexports.walk = walk;\r\nfunction walkSync(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new sync_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkSync = walkSync;\r\nfunction walkStream(directory, optionsOrSettings) {\r\n const settings = getSettings(optionsOrSettings);\r\n const provider = new stream_1.default(directory, settings);\r\n return provider.read();\r\n}\r\nexports.walkStream = walkStream;\r\nfunction getSettings(settingsOrOptions = {}) {\r\n if (settingsOrOptions instanceof settings_1.default) {\r\n return settingsOrOptions;\r\n }\r\n return new settings_1.default(settingsOrOptions);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst async_1 = require(\"../readers/async\");\r\nclass AsyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._storage = new Set();\r\n }\r\n read(callback) {\r\n this._reader.onError((error) => {\r\n callFailureCallback(callback, error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._storage.add(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n callSuccessCallback(callback, [...this._storage]);\r\n });\r\n this._reader.read();\r\n }\r\n}\r\nexports.default = AsyncProvider;\r\nfunction 
callFailureCallback(callback, error) {\r\n callback(error);\r\n}\r\nfunction callSuccessCallback(callback, entries) {\r\n callback(null, entries);\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst async_1 = require(\"../readers/async\");\r\nclass StreamProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new async_1.default(this._root, this._settings);\r\n this._stream = new stream_1.Readable({\r\n objectMode: true,\r\n read: () => { },\r\n destroy: () => {\r\n if (!this._reader.isDestroyed) {\r\n this._reader.destroy();\r\n }\r\n }\r\n });\r\n }\r\n read() {\r\n this._reader.onError((error) => {\r\n this._stream.emit('error', error);\r\n });\r\n this._reader.onEntry((entry) => {\r\n this._stream.push(entry);\r\n });\r\n this._reader.onEnd(() => {\r\n this._stream.push(null);\r\n });\r\n this._reader.read();\r\n return this._stream;\r\n }\r\n}\r\nexports.default = StreamProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nclass SyncProvider {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._reader = new sync_1.default(this._root, this._settings);\r\n }\r\n read() {\r\n return this._reader.read();\r\n }\r\n}\r\nexports.default = SyncProvider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst events_1 = require(\"events\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst fastq = require(\"fastq\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass AsyncReader extends reader_1.default {\r\n constructor(_root, _settings) {\r\n super(_root, _settings);\r\n this._settings = _settings;\r\n this._scandir = fsScandir.scandir;\r\n this._emitter = new events_1.EventEmitter();\r\n this._queue = fastq(this._worker.bind(this), this._settings.concurrency);\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n this._queue.drain = () => {\r\n if (!this._isFatalError) {\r\n this._emitter.emit('end');\r\n }\r\n };\r\n }\r\n read() {\r\n this._isFatalError = false;\r\n this._isDestroyed = false;\r\n setImmediate(() => {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n });\r\n return this._emitter;\r\n }\r\n get isDestroyed() {\r\n return this._isDestroyed;\r\n }\r\n destroy() {\r\n if (this._isDestroyed) {\r\n throw new Error('The reader is already destroyed');\r\n }\r\n this._isDestroyed = true;\r\n this._queue.killAndDrain();\r\n }\r\n onEntry(callback) {\r\n this._emitter.on('entry', callback);\r\n }\r\n onError(callback) {\r\n this._emitter.once('error', callback);\r\n }\r\n onEnd(callback) {\r\n this._emitter.once('end', callback);\r\n }\r\n _pushToQueue(directory, base) {\r\n const queueItem = { directory, base };\r\n this._queue.push(queueItem, (error) => {\r\n if (error !== null) {\r\n this._handleError(error);\r\n }\r\n });\r\n }\r\n _worker(item, done) {\r\n this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {\r\n if (error !== null) {\r\n return done(error, undefined);\r\n }\r\n for (const entry of entries) {\r\n this._handleEntry(entry, item.base);\r\n }\r\n done(null, undefined);\r\n });\r\n }\r\n _handleError(error) {\r\n if (this._isDestroyed || !common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n 
this._isFatalError = true;\r\n this._isDestroyed = true;\r\n this._emitter.emit('error', error);\r\n }\r\n _handleEntry(entry, base) {\r\n if (this._isDestroyed || this._isFatalError) {\r\n return;\r\n }\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._emitEntry(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _emitEntry(entry) {\r\n this._emitter.emit('entry', entry);\r\n }\r\n}\r\nexports.default = AsyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;\r\nfunction isFatalError(settings, error) {\r\n if (settings.errorFilter === null) {\r\n return true;\r\n }\r\n return !settings.errorFilter(error);\r\n}\r\nexports.isFatalError = isFatalError;\r\nfunction isAppliedFilter(filter, value) {\r\n return filter === null || filter(value);\r\n}\r\nexports.isAppliedFilter = isAppliedFilter;\r\nfunction replacePathSegmentSeparator(filepath, separator) {\r\n return filepath.split(/[/\\\\]/).join(separator);\r\n}\r\nexports.replacePathSegmentSeparator = replacePathSegmentSeparator;\r\nfunction joinPathSegments(a, b, separator) {\r\n if (a === '') {\r\n return b;\r\n }\r\n /**\r\n * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).\r\n */\r\n if (a.endsWith(separator)) {\r\n return a + b;\r\n }\r\n return a + separator + b;\r\n}\r\nexports.joinPathSegments = joinPathSegments;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst common = require(\"./common\");\r\nclass Reader {\r\n constructor(_root, _settings) {\r\n this._root = _root;\r\n this._settings = _settings;\r\n this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nconst common = require(\"./common\");\r\nconst reader_1 = require(\"./reader\");\r\nclass SyncReader extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._scandir = fsScandir.scandirSync;\r\n this._storage = new Set();\r\n this._queue = new Set();\r\n }\r\n read() {\r\n this._pushToQueue(this._root, this._settings.basePath);\r\n this._handleQueue();\r\n return [...this._storage];\r\n }\r\n _pushToQueue(directory, base) {\r\n this._queue.add({ directory, base });\r\n }\r\n _handleQueue() {\r\n for (const item of this._queue.values()) {\r\n this._handleDirectory(item.directory, item.base);\r\n }\r\n }\r\n _handleDirectory(directory, base) {\r\n try {\r\n const entries = this._scandir(directory, this._settings.fsScandirSettings);\r\n for (const entry of entries) {\r\n this._handleEntry(entry, base);\r\n }\r\n }\r\n catch (error) {\r\n this._handleError(error);\r\n }\r\n }\r\n _handleError(error) {\r\n if (!common.isFatalError(this._settings, error)) {\r\n return;\r\n }\r\n throw error;\r\n }\r\n _handleEntry(entry, base) {\r\n const fullpath = entry.path;\r\n if (base !== undefined) {\r\n entry.path = common.joinPathSegments(base, entry.name, 
this._settings.pathSegmentSeparator);\r\n }\r\n if (common.isAppliedFilter(this._settings.entryFilter, entry)) {\r\n this._pushToStorage(entry);\r\n }\r\n if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {\r\n this._pushToQueue(fullpath, entry.path);\r\n }\r\n }\r\n _pushToStorage(entry) {\r\n this._storage.add(entry);\r\n }\r\n}\r\nexports.default = SyncReader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsScandir = require(\"@nodelib/fs.scandir\");\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.basePath = this._getValue(this._options.basePath, undefined);\r\n this.concurrency = this._getValue(this._options.concurrency, Infinity);\r\n this.deepFilter = this._getValue(this._options.deepFilter, null);\r\n this.entryFilter = this._getValue(this._options.entryFilter, null);\r\n this.errorFilter = this._getValue(this._options.errorFilter, null);\r\n this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);\r\n this.fsScandirSettings = new fsScandir.Settings({\r\n followSymbolicLinks: this._options.followSymbolicLinks,\r\n fs: this._options.fs,\r\n pathSegmentSeparator: this._options.pathSegmentSeparator,\r\n stats: this._options.stats,\r\n throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink\r\n });\r\n }\r\n _getValue(option, value) {\r\n return option !== null && option !== void 0 ? option : value;\r\n }\r\n}\r\nexports.default = Settings;\r\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? 
\"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.2.1\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are 
set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, [\"authStrategy\"]);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.9\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.5.7\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.6.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.paginateRest = paginateRest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listJobsForWorkflowRun: [\"GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n 
checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n 
listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\", {\n mediaType: {\n previews: [\"antiope\"]\n }\n }]\n },\n codeScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForOrg: [\"GET 
/orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\", {\n mediaType: {\n previews: [\"sombra\"]\n }\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET 
/orgs/{org}/blocks\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n 
previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: 
{\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET 
/repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n downloadArchive: [\"GET /repos/{owner}/{repo}/{archive_format}/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: 
[\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\", {\n mediaType: {\n previews: [\"black-panther\"]\n }\n }],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET 
/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", 
{\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"4.2.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\n/**\n * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary\n * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is\n * done, we will remove the registerEndpoints methods and return the methods\n * directly as with the other plugins. At that point we will also remove the\n * legacy workarounds and deprecations.\n *\n * See the plan at\n * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1\n */\n\nfunction restEndpointMethods(octokit) {\n return endpointsToMethods(octokit, Endpoints);\n}\nrestEndpointMethods.VERSION = VERSION;\n\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.10\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","\"use strict\";\n/// \n/// \n/// \nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst typedArrayTypeNames = [\n 'Int8Array',\n 'Uint8Array',\n 'Uint8ClampedArray',\n 'Int16Array',\n 'Uint16Array',\n 'Int32Array',\n 'Uint32Array',\n 'Float32Array',\n 'Float64Array',\n 'BigInt64Array',\n 'BigUint64Array'\n];\nfunction isTypedArrayName(name) {\n return typedArrayTypeNames.includes(name);\n}\nconst objectTypeNames = [\n 'Function',\n 'Generator',\n 'AsyncGenerator',\n 'GeneratorFunction',\n 'AsyncGeneratorFunction',\n 'AsyncFunction',\n 'Observable',\n 'Array',\n 'Buffer',\n 'Object',\n 'RegExp',\n 'Date',\n 'Error',\n 'Map',\n 'Set',\n 'WeakMap',\n 'WeakSet',\n 'ArrayBuffer',\n 'SharedArrayBuffer',\n 'DataView',\n 'Promise',\n 'URL',\n 'HTMLElement',\n ...typedArrayTypeNames\n];\nfunction isObjectTypeName(name) {\n return objectTypeNames.includes(name);\n}\nconst primitiveTypeNames = [\n 'null',\n 'undefined',\n 'string',\n 'number',\n 'bigint',\n 'boolean',\n 'symbol'\n];\nfunction isPrimitiveTypeName(name) {\n return primitiveTypeNames.includes(name);\n}\n// eslint-disable-next-line @typescript-eslint/ban-types\nfunction isOfType(type) {\n return (value) => typeof value === type;\n}\nconst { toString } = Object.prototype;\nconst getObjectType = (value) => {\n const objectTypeName = toString.call(value).slice(8, -1);\n if (/HTML\\w+Element/.test(objectTypeName) && is.domElement(value)) {\n return 'HTMLElement';\n }\n if (isObjectTypeName(objectTypeName)) {\n return objectTypeName;\n }\n return undefined;\n};\nconst isObjectOfType = (type) => (value) => getObjectType(value) === type;\nfunction is(value) {\n if (value === null) {\n return 'null';\n }\n switch (typeof value) {\n case 'undefined':\n return 'undefined';\n case 'string':\n return 'string';\n case 'number':\n return 'number';\n case 'boolean':\n return 'boolean';\n case 'function':\n return 'Function';\n case 'bigint':\n return 'bigint';\n case 'symbol':\n return 'symbol';\n default:\n }\n if (is.observable(value)) {\n return 'Observable';\n }\n if (is.array(value)) {\n return 'Array';\n }\n if (is.buffer(value)) {\n return 'Buffer';\n }\n const tagType = getObjectType(value);\n if (tagType) {\n return tagType;\n }\n if (value instanceof String || value instanceof Boolean || value instanceof Number) {\n throw new TypeError('Please don\\'t use object wrappers for primitive types');\n }\n return 'Object';\n}\nis.undefined = isOfType('undefined');\nis.string = isOfType('string');\nconst isNumberType = isOfType('number');\nis.number = (value) => isNumberType(value) && !is.nan(value);\nis.bigint = isOfType('bigint');\n// 
eslint-disable-next-line @typescript-eslint/ban-types\nis.function_ = isOfType('function');\nis.null_ = (value) => value === null;\nis.class_ = (value) => is.function_(value) && value.toString().startsWith('class ');\nis.boolean = (value) => value === true || value === false;\nis.symbol = isOfType('symbol');\nis.numericString = (value) => is.string(value) && !is.emptyStringOrWhitespace(value) && !Number.isNaN(Number(value));\nis.array = (value, assertion) => {\n if (!Array.isArray(value)) {\n return false;\n }\n if (!is.function_(assertion)) {\n return true;\n }\n return value.every(assertion);\n};\nis.buffer = (value) => { var _a, _b, _c, _d; return (_d = (_c = (_b = (_a = value) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.isBuffer) === null || _c === void 0 ? void 0 : _c.call(_b, value)) !== null && _d !== void 0 ? _d : false; };\nis.nullOrUndefined = (value) => is.null_(value) || is.undefined(value);\nis.object = (value) => !is.null_(value) && (typeof value === 'object' || is.function_(value));\nis.iterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.iterator]); };\nis.asyncIterable = (value) => { var _a; return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a[Symbol.asyncIterator]); };\nis.generator = (value) => is.iterable(value) && is.function_(value.next) && is.function_(value.throw);\nis.asyncGenerator = (value) => is.asyncIterable(value) && is.function_(value.next) && is.function_(value.throw);\nis.nativePromise = (value) => isObjectOfType('Promise')(value);\nconst hasPromiseAPI = (value) => {\n var _a, _b;\n return is.function_((_a = value) === null || _a === void 0 ? void 0 : _a.then) &&\n is.function_((_b = value) === null || _b === void 0 ? 
void 0 : _b.catch);\n};\nis.promise = (value) => is.nativePromise(value) || hasPromiseAPI(value);\nis.generatorFunction = isObjectOfType('GeneratorFunction');\nis.asyncGeneratorFunction = (value) => getObjectType(value) === 'AsyncGeneratorFunction';\nis.asyncFunction = (value) => getObjectType(value) === 'AsyncFunction';\n// eslint-disable-next-line no-prototype-builtins, @typescript-eslint/ban-types\nis.boundFunction = (value) => is.function_(value) && !value.hasOwnProperty('prototype');\nis.regExp = isObjectOfType('RegExp');\nis.date = isObjectOfType('Date');\nis.error = isObjectOfType('Error');\nis.map = (value) => isObjectOfType('Map')(value);\nis.set = (value) => isObjectOfType('Set')(value);\nis.weakMap = (value) => isObjectOfType('WeakMap')(value);\nis.weakSet = (value) => isObjectOfType('WeakSet')(value);\nis.int8Array = isObjectOfType('Int8Array');\nis.uint8Array = isObjectOfType('Uint8Array');\nis.uint8ClampedArray = isObjectOfType('Uint8ClampedArray');\nis.int16Array = isObjectOfType('Int16Array');\nis.uint16Array = isObjectOfType('Uint16Array');\nis.int32Array = isObjectOfType('Int32Array');\nis.uint32Array = isObjectOfType('Uint32Array');\nis.float32Array = isObjectOfType('Float32Array');\nis.float64Array = isObjectOfType('Float64Array');\nis.bigInt64Array = isObjectOfType('BigInt64Array');\nis.bigUint64Array = isObjectOfType('BigUint64Array');\nis.arrayBuffer = isObjectOfType('ArrayBuffer');\nis.sharedArrayBuffer = isObjectOfType('SharedArrayBuffer');\nis.dataView = isObjectOfType('DataView');\nis.directInstanceOf = (instance, class_) => Object.getPrototypeOf(instance) === class_.prototype;\nis.urlInstance = (value) => isObjectOfType('URL')(value);\nis.urlString = (value) => {\n if (!is.string(value)) {\n return false;\n }\n try {\n new URL(value); // eslint-disable-line no-new\n return true;\n }\n catch (_a) {\n return false;\n }\n};\n// TODO: Use the `not` operator with a type guard here when it's available.\n// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`\nis.truthy = (value) => Boolean(value);\n// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`\nis.falsy = (value) => !value;\nis.nan = (value) => Number.isNaN(value);\nis.primitive = (value) => is.null_(value) || isPrimitiveTypeName(typeof value);\nis.integer = (value) => Number.isInteger(value);\nis.safeInteger = (value) => Number.isSafeInteger(value);\nis.plainObject = (value) => {\n // From: https://github.com/sindresorhus/is-plain-obj/blob/master/index.js\n if (toString.call(value) !== '[object Object]') {\n return false;\n }\n const prototype = Object.getPrototypeOf(value);\n return prototype === null || prototype === Object.getPrototypeOf({});\n};\nis.typedArray = (value) => isTypedArrayName(getObjectType(value));\nconst isValidLength = (value) => is.safeInteger(value) && value >= 0;\nis.arrayLike = (value) => !is.nullOrUndefined(value) && !is.function_(value) && isValidLength(value.length);\nis.inRange = (value, range) => {\n if (is.number(range)) {\n return value >= Math.min(0, range) && value <= Math.max(range, 0);\n }\n if (is.array(range) && range.length === 2) {\n return value >= Math.min(...range) && value <= Math.max(...range);\n }\n throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);\n};\nconst NODE_TYPE_ELEMENT = 1;\nconst DOM_PROPERTIES_TO_CHECK = [\n 'innerHTML',\n 'ownerDocument',\n 'style',\n 'attributes',\n 'nodeValue'\n];\nis.domElement = (value) => {\n 
return is.object(value) &&\n value.nodeType === NODE_TYPE_ELEMENT &&\n is.string(value.nodeName) &&\n !is.plainObject(value) &&\n DOM_PROPERTIES_TO_CHECK.every(property => property in value);\n};\nis.observable = (value) => {\n var _a, _b, _c, _d;\n if (!value) {\n return false;\n }\n // eslint-disable-next-line no-use-extend-native/no-use-extend-native\n if (value === ((_b = (_a = value)[Symbol.observable]) === null || _b === void 0 ? void 0 : _b.call(_a))) {\n return true;\n }\n if (value === ((_d = (_c = value)['@@observable']) === null || _d === void 0 ? void 0 : _d.call(_c))) {\n return true;\n }\n return false;\n};\nis.nodeStream = (value) => is.object(value) && is.function_(value.pipe) && !is.observable(value);\nis.infinite = (value) => value === Infinity || value === -Infinity;\nconst isAbsoluteMod2 = (remainder) => (value) => is.integer(value) && Math.abs(value % 2) === remainder;\nis.evenInteger = isAbsoluteMod2(0);\nis.oddInteger = isAbsoluteMod2(1);\nis.emptyArray = (value) => is.array(value) && value.length === 0;\nis.nonEmptyArray = (value) => is.array(value) && value.length > 0;\nis.emptyString = (value) => is.string(value) && value.length === 0;\n// TODO: Use `not ''` when the `not` operator is available.\nis.nonEmptyString = (value) => is.string(value) && value.length > 0;\nconst isWhiteSpaceString = (value) => is.string(value) && !/\\S/.test(value);\nis.emptyStringOrWhitespace = (value) => is.emptyString(value) || isWhiteSpaceString(value);\nis.emptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length === 0;\n// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:\n// - https://github.com/Microsoft/TypeScript/pull/29317\nis.nonEmptyObject = (value) => is.object(value) && !is.map(value) && !is.set(value) && Object.keys(value).length > 0;\nis.emptySet = (value) => is.set(value) && value.size === 0;\nis.nonEmptySet = (value) => is.set(value) && value.size > 0;\nis.emptyMap = (value) => is.map(value) && value.size === 0;\nis.nonEmptyMap = (value) => is.map(value) && value.size > 0;\nconst predicateOnArray = (method, predicate, values) => {\n if (!is.function_(predicate)) {\n throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);\n }\n if (values.length === 0) {\n throw new TypeError('Invalid number of values');\n }\n return method.call(values, predicate);\n};\nis.any = (predicate, ...values) => {\n const predicates = is.array(predicate) ? 
predicate : [predicate];\n return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));\n};\nis.all = (predicate, ...values) => predicateOnArray(Array.prototype.every, predicate, values);\nconst assertType = (condition, description, value) => {\n if (!condition) {\n throw new TypeError(`Expected value which is \\`${description}\\`, received value of type \\`${is(value)}\\`.`);\n }\n};\nexports.assert = {\n // Unknowns.\n undefined: (value) => assertType(is.undefined(value), 'undefined', value),\n string: (value) => assertType(is.string(value), 'string', value),\n number: (value) => assertType(is.number(value), 'number', value),\n bigint: (value) => assertType(is.bigint(value), 'bigint', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n function_: (value) => assertType(is.function_(value), 'Function', value),\n null_: (value) => assertType(is.null_(value), 'null', value),\n class_: (value) => assertType(is.class_(value), \"Class\" /* class_ */, value),\n boolean: (value) => assertType(is.boolean(value), 'boolean', value),\n symbol: (value) => assertType(is.symbol(value), 'symbol', value),\n numericString: (value) => assertType(is.numericString(value), \"string with a number\" /* numericString */, value),\n array: (value, assertion) => {\n const assert = assertType;\n assert(is.array(value), 'Array', value);\n if (assertion) {\n value.forEach(assertion);\n }\n },\n buffer: (value) => assertType(is.buffer(value), 'Buffer', value),\n nullOrUndefined: (value) => assertType(is.nullOrUndefined(value), \"null or undefined\" /* nullOrUndefined */, value),\n object: (value) => assertType(is.object(value), 'Object', value),\n iterable: (value) => assertType(is.iterable(value), \"Iterable\" /* iterable */, value),\n asyncIterable: (value) => assertType(is.asyncIterable(value), \"AsyncIterable\" /* asyncIterable */, value),\n generator: (value) => assertType(is.generator(value), 'Generator', value),\n asyncGenerator: (value) => assertType(is.asyncGenerator(value), 'AsyncGenerator', value),\n nativePromise: (value) => assertType(is.nativePromise(value), \"native Promise\" /* nativePromise */, value),\n promise: (value) => assertType(is.promise(value), 'Promise', value),\n generatorFunction: (value) => assertType(is.generatorFunction(value), 'GeneratorFunction', value),\n asyncGeneratorFunction: (value) => assertType(is.asyncGeneratorFunction(value), 'AsyncGeneratorFunction', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n asyncFunction: (value) => assertType(is.asyncFunction(value), 'AsyncFunction', value),\n // eslint-disable-next-line @typescript-eslint/ban-types\n boundFunction: (value) => assertType(is.boundFunction(value), 'Function', value),\n regExp: (value) => assertType(is.regExp(value), 'RegExp', value),\n date: (value) => assertType(is.date(value), 'Date', value),\n error: (value) => assertType(is.error(value), 'Error', value),\n map: (value) => assertType(is.map(value), 'Map', value),\n set: (value) => assertType(is.set(value), 'Set', value),\n weakMap: (value) => assertType(is.weakMap(value), 'WeakMap', value),\n weakSet: (value) => assertType(is.weakSet(value), 'WeakSet', value),\n int8Array: (value) => assertType(is.int8Array(value), 'Int8Array', value),\n uint8Array: (value) => assertType(is.uint8Array(value), 'Uint8Array', value),\n uint8ClampedArray: (value) => assertType(is.uint8ClampedArray(value), 'Uint8ClampedArray', value),\n int16Array: (value) => assertType(is.int16Array(value), 'Int16Array', 
value),\n uint16Array: (value) => assertType(is.uint16Array(value), 'Uint16Array', value),\n int32Array: (value) => assertType(is.int32Array(value), 'Int32Array', value),\n uint32Array: (value) => assertType(is.uint32Array(value), 'Uint32Array', value),\n float32Array: (value) => assertType(is.float32Array(value), 'Float32Array', value),\n float64Array: (value) => assertType(is.float64Array(value), 'Float64Array', value),\n bigInt64Array: (value) => assertType(is.bigInt64Array(value), 'BigInt64Array', value),\n bigUint64Array: (value) => assertType(is.bigUint64Array(value), 'BigUint64Array', value),\n arrayBuffer: (value) => assertType(is.arrayBuffer(value), 'ArrayBuffer', value),\n sharedArrayBuffer: (value) => assertType(is.sharedArrayBuffer(value), 'SharedArrayBuffer', value),\n dataView: (value) => assertType(is.dataView(value), 'DataView', value),\n urlInstance: (value) => assertType(is.urlInstance(value), 'URL', value),\n urlString: (value) => assertType(is.urlString(value), \"string with a URL\" /* urlString */, value),\n truthy: (value) => assertType(is.truthy(value), \"truthy\" /* truthy */, value),\n falsy: (value) => assertType(is.falsy(value), \"falsy\" /* falsy */, value),\n nan: (value) => assertType(is.nan(value), \"NaN\" /* nan */, value),\n primitive: (value) => assertType(is.primitive(value), \"primitive\" /* primitive */, value),\n integer: (value) => assertType(is.integer(value), \"integer\" /* integer */, value),\n safeInteger: (value) => assertType(is.safeInteger(value), \"integer\" /* safeInteger */, value),\n plainObject: (value) => assertType(is.plainObject(value), \"plain object\" /* plainObject */, value),\n typedArray: (value) => assertType(is.typedArray(value), \"TypedArray\" /* typedArray */, value),\n arrayLike: (value) => assertType(is.arrayLike(value), \"array-like\" /* arrayLike */, value),\n domElement: (value) => assertType(is.domElement(value), \"HTMLElement\" /* domElement */, value),\n observable: (value) => assertType(is.observable(value), 'Observable', value),\n nodeStream: (value) => assertType(is.nodeStream(value), \"Node.js Stream\" /* nodeStream */, value),\n infinite: (value) => assertType(is.infinite(value), \"infinite number\" /* infinite */, value),\n emptyArray: (value) => assertType(is.emptyArray(value), \"empty array\" /* emptyArray */, value),\n nonEmptyArray: (value) => assertType(is.nonEmptyArray(value), \"non-empty array\" /* nonEmptyArray */, value),\n emptyString: (value) => assertType(is.emptyString(value), \"empty string\" /* emptyString */, value),\n nonEmptyString: (value) => assertType(is.nonEmptyString(value), \"non-empty string\" /* nonEmptyString */, value),\n emptyStringOrWhitespace: (value) => assertType(is.emptyStringOrWhitespace(value), \"empty string or whitespace\" /* emptyStringOrWhitespace */, value),\n emptyObject: (value) => assertType(is.emptyObject(value), \"empty object\" /* emptyObject */, value),\n nonEmptyObject: (value) => assertType(is.nonEmptyObject(value), \"non-empty object\" /* nonEmptyObject */, value),\n emptySet: (value) => assertType(is.emptySet(value), \"empty set\" /* emptySet */, value),\n nonEmptySet: (value) => assertType(is.nonEmptySet(value), \"non-empty set\" /* nonEmptySet */, value),\n emptyMap: (value) => assertType(is.emptyMap(value), \"empty map\" /* emptyMap */, value),\n nonEmptyMap: (value) => assertType(is.nonEmptyMap(value), \"non-empty map\" /* nonEmptyMap */, value),\n // Numbers.\n evenInteger: (value) => assertType(is.evenInteger(value), \"even integer\" /* evenInteger */, 
value),\n oddInteger: (value) => assertType(is.oddInteger(value), \"odd integer\" /* oddInteger */, value),\n // Two arguments.\n directInstanceOf: (instance, class_) => assertType(is.directInstanceOf(instance, class_), \"T\" /* directInstanceOf */, instance),\n inRange: (value, range) => assertType(is.inRange(value, range), \"in range\" /* inRange */, value),\n // Variadic functions.\n any: (predicate, ...values) => assertType(is.any(predicate, ...values), \"predicate returns truthy for any value\" /* any */, values),\n all: (predicate, ...values) => assertType(is.all(predicate, ...values), \"predicate returns truthy for all values\" /* all */, values)\n};\n// Some few keywords are reserved, but we'll populate them for Node.js users\n// See https://github.com/Microsoft/TypeScript/issues/2536\nObject.defineProperties(is, {\n class: {\n value: is.class_\n },\n function: {\n value: is.function_\n },\n null: {\n value: is.null_\n }\n});\nObject.defineProperties(exports.assert, {\n class: {\n value: exports.assert.class_\n },\n function: {\n value: exports.assert.function_\n },\n null: {\n value: exports.assert.null_\n }\n});\nexports.default = is;\n// For CommonJS default export support\nmodule.exports = is;\nmodule.exports.default = is;\nmodule.exports.assert = exports.assert;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst defer_to_connect_1 = require(\"defer-to-connect\");\nconst nodejsMajorVersion = Number(process.versions.node.split('.')[0]);\nconst timer = (request) => {\n const timings = {\n start: Date.now(),\n socket: undefined,\n lookup: undefined,\n connect: undefined,\n secureConnect: undefined,\n upload: undefined,\n response: undefined,\n end: undefined,\n error: undefined,\n abort: undefined,\n phases: {\n wait: undefined,\n dns: undefined,\n tcp: undefined,\n tls: undefined,\n request: undefined,\n firstByte: undefined,\n download: undefined,\n total: undefined\n }\n };\n request.timings = timings;\n const handleError = (origin) => {\n const emit = origin.emit.bind(origin);\n origin.emit = (event, ...args) => {\n // Catches the `error` event\n if (event === 'error') {\n timings.error = Date.now();\n timings.phases.total = timings.error - timings.start;\n origin.emit = emit;\n }\n // Saves the original behavior\n return emit(event, ...args);\n };\n };\n handleError(request);\n request.prependOnceListener('abort', () => {\n timings.abort = Date.now();\n // Let the `end` response event be responsible for setting the total phase,\n // unless the Node.js major version is >= 13.\n if (!timings.response || nodejsMajorVersion >= 13) {\n timings.phases.total = Date.now() - timings.start;\n }\n });\n const onSocket = (socket) => {\n timings.socket = Date.now();\n timings.phases.wait = timings.socket - timings.start;\n const lookupListener = () => {\n timings.lookup = Date.now();\n timings.phases.dns = timings.lookup - timings.socket;\n };\n socket.prependOnceListener('lookup', lookupListener);\n defer_to_connect_1.default(socket, {\n connect: () => {\n timings.connect = Date.now();\n if (timings.lookup === undefined) {\n socket.removeListener('lookup', lookupListener);\n timings.lookup = timings.connect;\n timings.phases.dns = timings.lookup - timings.socket;\n }\n timings.phases.tcp = timings.connect - timings.lookup;\n // This callback is called before flushing any data,\n // so we don't need to set `timings.phases.request` here.\n },\n secureConnect: () => {\n timings.secureConnect = Date.now();\n timings.phases.tls = timings.secureConnect 
- timings.connect;\n }\n });\n };\n if (request.socket) {\n onSocket(request.socket);\n }\n else {\n request.prependOnceListener('socket', onSocket);\n }\n const onUpload = () => {\n var _a;\n timings.upload = Date.now();\n timings.phases.request = timings.upload - (_a = timings.secureConnect, (_a !== null && _a !== void 0 ? _a : timings.connect));\n };\n const writableFinished = () => {\n if (typeof request.writableFinished === 'boolean') {\n return request.writableFinished;\n }\n // Node.js doesn't have `request.writableFinished` property\n return request.finished && request.outputSize === 0 && (!request.socket || request.socket.writableLength === 0);\n };\n if (writableFinished()) {\n onUpload();\n }\n else {\n request.prependOnceListener('finish', onUpload);\n }\n request.prependOnceListener('response', (response) => {\n timings.response = Date.now();\n timings.phases.firstByte = timings.response - timings.upload;\n response.timings = timings;\n handleError(response);\n response.prependOnceListener('end', () => {\n timings.end = Date.now();\n timings.phases.download = timings.end - timings.response;\n timings.phases.total = timings.end - timings.start;\n });\n });\n return timings;\n};\nexports.default = timer;\n// For CommonJS default export support\nmodule.exports = timer;\nmodule.exports.default = timer;\n","var Utils = require(\"./util\");\r\nvar fs = Utils.FileSystem.require(),\r\n\tpth = require(\"path\");\r\n\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nvar ZipEntry = require(\"./zipEntry\"),\r\n\tZipFile = require(\"./zipFile\");\r\n\r\nvar isWin = /^win/.test(process.platform);\r\n\r\nfunction canonical(p) {\r\n var safeSuffix = pth.normalize(p).replace(/^(\\.\\.(\\/|\\\\|$))+/, '');\r\n return pth.join('./', safeSuffix);\r\n}\r\n\r\nmodule.exports = function (/**String*/input) {\r\n\tvar _zip = undefined,\r\n\t\t_filename = \"\";\r\n\r\n\tif (input && typeof input === \"string\") { // load zip file\r\n\t\tif (fs.existsSync(input)) {\r\n\t\t\t_filename = input;\r\n\t\t\t_zip = new ZipFile(input, Utils.Constants.FILE);\r\n\t\t} else {\r\n\t\t\tthrow new Error(Utils.Errors.INVALID_FILENAME);\r\n\t\t}\r\n\t} else if (input && Buffer.isBuffer(input)) { // load buffer\r\n\t\t_zip = new ZipFile(input, Utils.Constants.BUFFER);\r\n\t} else { // create new zip file\r\n\t\t_zip = new ZipFile(null, Utils.Constants.NONE);\r\n\t}\r\n\r\n\tfunction sanitize(prefix, name) {\r\n\t\tprefix = pth.resolve(pth.normalize(prefix));\r\n\t\tvar parts = name.split('/');\r\n\t\tfor (var i = 0, l = parts.length; i < l; i++) {\r\n\t\t\tvar path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));\r\n\t\t\tif (path.indexOf(prefix) === 0) {\r\n\t\t\t\treturn path;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn pth.normalize(pth.join(prefix, pth.basename(name)));\r\n\t}\r\n\r\n\tfunction getEntry(/**Object*/entry) {\r\n\t\tif (entry && _zip) {\r\n\t\t\tvar item;\r\n\t\t\t// If entry was given as a file name\r\n\t\t\tif (typeof entry === \"string\")\r\n\t\t\t\titem = _zip.getEntry(entry);\r\n\t\t\t// if entry was given as a ZipEntry object\r\n\t\t\tif (typeof entry === \"object\" && typeof entry.entryName !== \"undefined\" && typeof entry.header !== \"undefined\")\r\n\t\t\t\titem = _zip.getEntry(entry.entryName);\r\n\r\n\t\t\tif (item) {\r\n\t\t\t\treturn item;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn null;\r\n\t}\r\n\r\n function fixPath(zipPath){\r\n // convert windows file separators and normalize\r\n zipPath = pth.posix.normalize(zipPath.split(\"\\\\\").join(\"/\"));\r\n // cleanup, remove 
invalid folder names\r\n var names = zipPath.split(\"/\").filter((c) => c !== \"\" && c !== \".\" && c !== \"..\");\r\n // if we have name we return it\r\n return names.length ? names.join(\"/\") + \"/\" : \"\";\r\n }\r\n\r\n\treturn {\r\n\t\t/**\r\n\t\t * Extracts the given entry from the archive and returns the content as a Buffer object\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t *\r\n\t\t * @return Buffer or Null in case of error\r\n\t\t */\r\n\t\treadFile: function (/**Object*/entry, /*String, Buffer*/pass) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\treturn item && item.getData(pass) || null;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous readFile\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param callback\r\n\t\t *\r\n\t\t * @return Buffer or Null in case of error\r\n\t\t */\r\n\t\treadFileAsync: function (/**Object*/entry, /**Function*/callback) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.getDataAsync(callback);\r\n\t\t\t} else {\r\n\t\t\t\tcallback(null, \"getEntry failed for:\" + entry)\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the given entry from the archive and returns the content as plain text in the given encoding\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param encoding Optional. If no encoding is specified utf8 is used\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\treadAsText: function (/**Object*/entry, /**String=*/encoding) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\tvar data = item.getData();\r\n\t\t\t\tif (data && data.length) {\r\n\t\t\t\t\treturn data.toString(encoding || \"utf8\");\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\treturn \"\";\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous readAsText\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param callback\r\n\t\t * @param encoding Optional. If no encoding is specified utf8 is used\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\treadAsTextAsync: function (/**Object*/entry, /**Function*/callback, /**String=*/encoding) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.getDataAsync(function (data, err) {\r\n\t\t\t\t\tif (err) {\r\n\t\t\t\t\t\tcallback(data, err);\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (data && data.length) {\r\n\t\t\t\t\t\tcallback(data.toString(encoding || \"utf8\"));\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tcallback(\"\");\r\n\t\t\t\t\t}\r\n\t\t\t\t})\r\n\t\t\t} else {\r\n\t\t\t\tcallback(\"\");\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t */\r\n\t\tdeleteFile: function (/**Object*/entry) { // @TODO: test deleteFile\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\t_zip.deleteEntry(item.entryName);\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a comment to the zip. 
The zip must be rewritten after adding the comment.\r\n\t\t *\r\n\t\t * @param comment\r\n\t\t */\r\n\t\taddZipComment: function (/**String*/comment) { // @TODO: test addZipComment\r\n\t\t\t_zip.comment = comment;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the zip comment\r\n\t\t *\r\n\t\t * @return String\r\n\t\t */\r\n\t\tgetZipComment: function () {\r\n\t\t\treturn _zip.comment || '';\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment\r\n\t\t * The comment cannot exceed 65535 characters in length\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @param comment\r\n\t\t */\r\n\t\taddZipEntryComment: function (/**Object*/entry, /**String*/comment) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.comment = comment;\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the comment of the specified entry\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @return String\r\n\t\t */\r\n\t\tgetZipEntryComment: function (/**Object*/entry) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\treturn item.comment || '';\r\n\t\t\t}\r\n\t\t\treturn ''\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @param content\r\n\t\t */\r\n\t\tupdateFile: function (/**Object*/entry, /**Buffer*/content) {\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (item) {\r\n\t\t\t\titem.setData(content);\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a file from the disk to the archive\r\n\t\t *\r\n\t\t * @param localPath File to add to zip\r\n\t\t * @param zipPath Optional path inside the zip\r\n\t\t * @param zipName Optional name for the file\r\n\t\t */\r\n\t\taddLocalFile: function (/**String*/localPath, /**String=*/zipPath, /**String=*/zipName, /**String*/comment) {\r\n\t\t\tif (fs.existsSync(localPath)) {\r\n\t\t\t\t// fix ZipPath\r\n\t\t\t\tzipPath = (zipPath) ? fixPath(zipPath) : \"\";\r\n\r\n\t\t\t\t// p - local file name\r\n\t\t\t\tvar p = localPath.split(\"\\\\\").join(\"/\").split(\"/\").pop();\r\n\r\n\t\t\t\t// add file name into zippath\r\n\t\t\t\tzipPath += (zipName) ? zipName : p;\r\n\r\n\t\t\t\t// read file attributes\r\n\t\t\t\tconst _attr = fs.statSync(localPath);\r\n\r\n\t\t\t\t// add file into zip file\r\n\t\t\t\tthis.addFile(zipPath, fs.readFileSync(localPath), comment, _attr)\r\n\t\t\t} else {\r\n\t\t\t\tthrow new Error(Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds a local directory and all its nested files and directories to the archive\r\n\t\t *\r\n\t\t * @param localPath\r\n\t\t * @param zipPath optional path inside zip\r\n\t\t * @param filter optional RegExp or Function if files match will\r\n\t\t * be included.\r\n\t\t */\r\n addLocalFolder: function (/**String*/localPath, /**String=*/zipPath, /**=RegExp|Function*/filter) {\r\n // Prepare filter\r\n if (filter instanceof RegExp) { // if filter is RegExp wrap it\r\n filter = (function (rx){\r\n return function (filename) {\r\n return rx.test(filename);\r\n }\r\n })(filter);\r\n } else if ('function' !== typeof filter) { // if filter is not function we will replace it\r\n filter = function () {\r\n return true;\r\n };\r\n }\r\n\r\n // fix ZipPath\r\n zipPath = (zipPath) ? 
fixPath(zipPath) : \"\";\r\n\r\n // normalize the path first\r\n localPath = pth.normalize(localPath);\r\n\r\n if (fs.existsSync(localPath)) {\r\n\r\n var items = Utils.findFiles(localPath),\r\n self = this;\r\n\r\n if (items.length) {\r\n items.forEach(function (filepath) {\r\n var p = pth.relative(localPath, filepath).split(\"\\\\\").join(\"/\"); //windows fix\r\n if (filter(p)) {\r\n var stats = fs.statSync(filepath);\r\n if (stats.isFile()) {\r\n self.addFile(zipPath + p, fs.readFileSync(filepath), \"\", stats);\r\n } else {\r\n self.addFile(zipPath + p + '/', Buffer.alloc(0), \"\", stats);\r\n }\r\n }\r\n });\r\n }\r\n } else {\r\n throw new Error(Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n }\r\n },\r\n\r\n\t\t/**\r\n\t\t * Asynchronous addLocalFile\r\n\t\t * @param localPath\r\n\t\t * @param callback\r\n\t\t * @param zipPath optional path inside zip\r\n\t\t * @param filter optional RegExp or Function if files match will\r\n\t\t * be included.\r\n\t\t */\r\n addLocalFolderAsync: function (/*String*/localPath, /*Function*/callback, /*String*/zipPath, /*RegExp|Function*/filter) {\r\n if (filter instanceof RegExp) {\r\n filter = (function (rx) {\r\n return function (filename) {\r\n return rx.test(filename);\r\n };\r\n })(filter);\r\n } else if (\"function\" !== typeof filter) {\r\n filter = function () {\r\n return true;\r\n };\r\n }\r\n\r\n // fix ZipPath\r\n zipPath = zipPath ? fixPath(zipPath) : \"\";\r\n\r\n // normalize the path first\r\n localPath = pth.normalize(localPath);\r\n\r\n var self = this;\r\n fs.open(localPath, 'r', function (err) {\r\n if (err && err.code === 'ENOENT') {\r\n callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace(\"%s\", localPath));\r\n } else if (err) {\r\n callback(undefined, err);\r\n } else {\r\n var items = Utils.findFiles(localPath);\r\n var i = -1;\r\n\r\n var next = function () {\r\n i += 1;\r\n if (i < items.length) {\r\n var filepath = items[i];\r\n var p = pth.relative(localPath, filepath).split(\"\\\\\").join(\"/\"); //windows fix\r\n p = p.normalize('NFD').replace(/[\\u0300-\\u036f]/g, '').replace(/[^\\x20-\\x7E]/g, '') // accent fix\r\n if (filter(p)) {\r\n fs.stat(filepath, function (er0, stats) {\r\n if (er0) callback(undefined, er0);\r\n if (stats.isFile()) {\r\n fs.readFile(filepath, function (er1, data) {\r\n if (er1) {\r\n callback(undefined, er1);\r\n } else {\r\n self.addFile(zipPath + p, data, \"\", stats);\r\n next();\r\n }\r\n });\r\n } else {\r\n self.addFile(zipPath + p + \"/\", Buffer.alloc(0), \"\", stats);\r\n next();\r\n }\r\n });\r\n } else {\r\n next();\r\n }\r\n\r\n } else {\r\n callback(true, undefined);\r\n }\r\n }\r\n\r\n next();\r\n }\r\n });\r\n },\r\n\r\n addLocalFolderPromise: function (/*String*/ localPath, /* object */ options) {\r\n return new Promise((resolve, reject) => {\r\n const { filter, zipPath } = Object.assign({}, options);\r\n this.addLocalFolderAsync(localPath,\r\n (done, err) => {\r\n if (err) reject(err);\r\n if (done) resolve(this);\r\n }, zipPath, filter\r\n );\r\n });\r\n },\r\n\r\n\t\t/**\r\n\t\t * Allows you to create a entry (file or directory) in the zip file.\r\n\t\t * If you want to create a directory the entryName must end in / and a null buffer should be provided.\r\n\t\t * Comment and attributes are optional\r\n\t\t *\r\n\t\t * @param entryName\r\n\t\t * @param content\r\n\t\t * @param comment\r\n\t\t * @param attr\r\n\t\t */\r\n\t\taddFile: function (/**String*/entryName, /**Buffer*/content, /**String*/comment, /**Number*/attr) {\r\n\t\t\t// prepare new 
entry\r\n\t\t\tvar entry = new ZipEntry();\r\n\t\t\tentry.entryName = entryName;\r\n\t\t\tentry.comment = comment || \"\";\r\n\r\n\t\t\tvar isStat = ('object' === typeof attr) && (attr instanceof fs.Stats);\r\n\r\n\t\t\t// last modification time from file stats\r\n\t\t\tif (isStat){\r\n\t\t\t\tentry.header.time = attr.mtime;\r\n\t\t\t}\r\n\r\n\t\t\t// Set file attribute\r\n\t\t\tvar fileattr = (entry.isDirectory) ? 0x10 : 0; // (MS-DOS directory flag)\r\n\r\n\t\t\t// extended attributes field for Unix\r\n\t\t\tif('win32' !== process.platform){\r\n\t\t\t\t// set file type either S_IFDIR / S_IFREG\r\n\t\t\t\tvar unix = (entry.isDirectory) ? 0x4000 : 0x8000;\r\n\r\n\t\t\t\tif (isStat) { \t\t\t\t\t\t\t\t\t\t// File attributes from file stats\r\n\t\t\t\t\tunix |= (0xfff & attr.mode);\r\n\t\t\t\t}else if ('number' === typeof attr){ \t\t\t\t// attr from given attr values\r\n\t\t\t\t\tunix |= (0xfff & attr);\r\n\t\t\t\t}else{\t\t\t\t\t\t\t\t\t\t\t\t// Default values:\r\n\t\t\t\t\tunix |= (entry.isDirectory) ? 0o755 : 0o644; \t// permissions (drwxr-xr-x) or (-r-wr--r--)\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfileattr = (fileattr | (unix << 16)) >>> 0;\t\t\t// add attributes\r\n\t\t\t}\r\n\r\n\t\t\tentry.attr = fileattr;\r\n\r\n\t\t\tentry.setData(content);\r\n\t\t\t_zip.setEntry(entry);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns an array of ZipEntry objects representing the files and folders inside the archive\r\n\t\t *\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tgetEntries: function () {\r\n\t\t\tif (_zip) {\r\n\t\t\t\treturn _zip.entries;\r\n\t\t\t} else {\r\n\t\t\t\treturn [];\r\n\t\t\t}\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns a ZipEntry object representing the file or folder specified by ``name``.\r\n\t\t *\r\n\t\t * @param name\r\n\t\t * @return ZipEntry\r\n\t\t */\r\n\t\tgetEntry: function (/**String*/name) {\r\n\t\t\treturn getEntry(name);\r\n\t\t},\r\n\r\n\t\tgetEntryCount: function() {\r\n\t\t\treturn _zip.getEntryCount();\r\n\t\t},\r\n\r\n\t\tforEach: function(callback) {\r\n\t\t\treturn _zip.forEach(callback);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the given entry to the given targetPath\r\n\t\t * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted\r\n\t\t *\r\n\t\t * @param entry ZipEntry object or String with the full path of the entry\r\n\t\t * @param targetPath Target folder where to write the file\r\n\t\t * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder\r\n\t\t * will be created in targetPath as well. Default is TRUE\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)\r\n\t\t *\r\n\t\t * @return Boolean\r\n\t\t */\r\n\t\textractEntryTo: function (/**Object*/entry, /**String*/targetPath, /**Boolean*/maintainEntryPath, /**Boolean*/overwrite, /**String**/outFileName) {\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tmaintainEntryPath = typeof maintainEntryPath === \"undefined\" ? true : maintainEntryPath;\r\n\r\n\t\t\tvar item = getEntry(entry);\r\n\t\t\tif (!item) {\r\n\t\t\t\tthrow new Error(Utils.Errors.NO_ENTRY);\r\n\t\t\t}\r\n\r\n\t\t\tvar entryName = canonical(item.entryName);\r\n\r\n\t\t\tvar target = sanitize(targetPath,outFileName && !item.isDirectory ? outFileName : (maintainEntryPath ? 
entryName : pth.basename(entryName)));\r\n\r\n\t\t\tif (item.isDirectory) {\r\n\t\t\t\ttarget = pth.resolve(target, \"..\");\r\n\t\t\t\tvar children = _zip.getEntryChildren(item);\r\n\t\t\t\tchildren.forEach(function (child) {\r\n\t\t\t\t\tif (child.isDirectory) return;\r\n\t\t\t\t\tvar content = child.getData();\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tvar name = canonical(child.entryName)\r\n\t\t\t\t\tvar childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));\r\n\t\t\t\t\t// The reverse operation for attr depend on method addFile()\r\n\t\t\t\t\tvar fileAttr = child.attr ? (((child.attr >>> 0) | 0) >> 16) & 0xfff : 0;\r\n\t\t\t\t\tUtils.writeFileTo(childName, content, overwrite, fileAttr);\r\n\t\t\t\t});\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tvar content = item.getData();\r\n\t\t\tif (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\r\n\t\t\tif (fs.existsSync(target) && !overwrite) {\r\n\t\t\t\tthrow new Error(Utils.Errors.CANT_OVERRIDE);\r\n\t\t\t}\r\n\t\t\t// The reverse operation for attr depend on method addFile()\r\n\t\t\tvar fileAttr = item.attr ? (((item.attr >>> 0) | 0) >> 16) & 0xfff : 0;\r\n\t\t\tUtils.writeFileTo(target, content, overwrite, fileAttr);\r\n\r\n\t\t\treturn true;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Test the archive\r\n\t\t *\r\n\t\t */\r\n\t\ttest: function (pass) {\r\n\t\t\tif (!_zip) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tfor (var entry in _zip.entries) {\r\n\t\t\t\ttry {\r\n\t\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tvar content = _zip.entries[entry].getData(pass);\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t} catch (err) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\treturn true;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Extracts the entire archive to the given location\r\n\t\t *\r\n\t\t * @param targetPath Target location\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n\t\t */\r\n\t\textractAllTo: function (/**String*/targetPath, /**Boolean*/overwrite, /*String, Buffer*/pass) {\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tif (!_zip) {\r\n\t\t\t\tthrow new Error(Utils.Errors.NO_ZIP);\r\n\t\t\t}\r\n\t\t\t_zip.entries.forEach(function (entry) {\r\n\t\t\t\tvar entryName = sanitize(targetPath, canonical(entry.entryName.toString()));\r\n\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\tUtils.makeDir(entryName);\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\t\t\t\tvar content = entry.getData(pass);\r\n\t\t\t\tif (!content) {\r\n\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t}\r\n\t\t\t\t// The reverse operation for attr depend on method addFile()\r\n\t\t\t\tvar fileAttr = entry.attr ? 
(((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0;\r\n\t\t\t\tUtils.writeFileTo(entryName, content, overwrite, fileAttr);\r\n\t\t\t\ttry {\r\n\t\t\t\t\tfs.utimesSync(entryName, entry.header.time, entry.header.time)\r\n\t\t\t\t} catch (err) {\r\n\t\t\t\t\tthrow new Error(Utils.Errors.CANT_EXTRACT_FILE);\r\n\t\t\t\t}\r\n\t\t\t})\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Asynchronous extractAllTo\r\n\t\t *\r\n\t\t * @param targetPath Target location\r\n\t\t * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true.\r\n\t\t * Default is FALSE\r\n\t\t * @param callback\r\n\t\t */\r\n\t\textractAllToAsync: function (/**String*/targetPath, /**Boolean*/overwrite, /**Function*/callback) {\r\n\t\t\tif (!callback) {\r\n\t\t\t\tcallback = function() {}\r\n\t\t\t}\r\n\t\t\toverwrite = overwrite || false;\r\n\t\t\tif (!_zip) {\r\n\t\t\t\tcallback(new Error(Utils.Errors.NO_ZIP));\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tvar entries = _zip.entries;\r\n\t\t\tvar i = entries.length;\r\n\t\t\tentries.forEach(function (entry) {\r\n\t\t\t\tif (i <= 0) return; // Had an error already\r\n\r\n\t\t\t\tvar entryName = pth.normalize(canonical(entry.entryName.toString()));\r\n\r\n\t\t\t\tif (entry.isDirectory) {\r\n\t\t\t\t\tUtils.makeDir(sanitize(targetPath, entryName));\r\n\t\t\t\t\tif (--i === 0)\r\n\t\t\t\t\t\tcallback(undefined);\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\t\t\t\tentry.getDataAsync(function (content, err) {\r\n\t\t\t\t\tif (i <= 0) return;\r\n\t\t\t\t\tif (err) {\r\n\t\t\t\t\t\tcallback(new Error(err));\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (!content) {\r\n\t\t\t\t\t\ti = 0;\r\n\t\t\t\t\t\tcallback(new Error(Utils.Errors.CANT_EXTRACT_FILE));\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\t// The reverse operation for attr depend on method addFile()\r\n\t\t\t\t\tvar fileAttr = entry.attr ? (((entry.attr >>> 0) | 0) >> 16) & 0xfff : 0;\r\n\t\t\t\t\tUtils.writeFileToAsync(sanitize(targetPath, entryName), content, overwrite, fileAttr, function (succ) {\r\n\t\t\t\t\t\ttry {\r\n\t\t\t\t\t\t\tfs.utimesSync(pth.resolve(targetPath, entryName), entry.header.time, entry.header.time);\r\n\t\t\t\t\t\t} catch (err) {\r\n\t\t\t\t\t\t\tcallback(new Error('Unable to set utimes'));\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tif (i <= 0) return;\r\n\t\t\t\t\t\tif (!succ) {\r\n\t\t\t\t\t\t\ti = 0;\r\n\t\t\t\t\t\t\tcallback(new Error('Unable to write'));\r\n\t\t\t\t\t\t\treturn;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tif (--i === 0)\r\n\t\t\t\t\t\t\tcallback(undefined);\r\n\t\t\t\t\t});\r\n\t\t\t\t});\r\n\t\t\t})\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip\r\n\t\t *\r\n\t\t * @param targetFileName\r\n\t\t * @param callback\r\n\t\t */\r\n\t\twriteZip: function (/**String*/targetFileName, /**Function*/callback) {\r\n\t\t\tif (arguments.length === 1) {\r\n\t\t\t\tif (typeof targetFileName === \"function\") {\r\n\t\t\t\t\tcallback = targetFileName;\r\n\t\t\t\t\ttargetFileName = \"\";\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (!targetFileName && _filename) {\r\n\t\t\t\ttargetFileName = _filename;\r\n\t\t\t}\r\n\t\t\tif (!targetFileName) return;\r\n\r\n\t\t\tvar zipData = _zip.compressToBuffer();\r\n\t\t\tif (zipData) {\r\n\t\t\t\tvar ok = Utils.writeFileTo(targetFileName, zipData, true);\r\n\t\t\t\tif (typeof callback === 'function') callback(!ok ? 
new Error(\"failed\") : null, \"\");\r\n\t\t\t}\r\n\t\t},\r\n\r\n writeZipPromise: function (/**String*/ targetFileName, /* object */ options) {\r\n const { overwrite, perm } = Object.assign({ overwrite: true }, options);\r\n\r\n return new Promise((resolve, reject) => {\r\n // find file name\r\n if (!targetFileName && _filename) targetFileName = _filename;\r\n if (!targetFileName) reject(\"ADM-ZIP: ZIP File Name Missing\");\r\n\r\n this.toBufferPromise().then((zipData) => {\r\n const ret = (done) => (done ? resolve(done) : reject(\"ADM-ZIP: Wasn't able to write zip file\"));\r\n Utils.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);\r\n }, reject);\r\n });\r\n },\r\n\r\n toBufferPromise: function () {\r\n return new Promise((resolve, reject) => {\r\n _zip.toAsyncBuffer(resolve, reject);\r\n });\r\n },\r\n\r\n\t\t/**\r\n\t\t * Returns the content of the entire zip file as a Buffer object\r\n\t\t *\r\n\t\t * @return Buffer\r\n\t\t */\r\n\t\ttoBuffer: function (/**Function=*/onSuccess, /**Function=*/onFail, /**Function=*/onItemStart, /**Function=*/onItemEnd) {\r\n\t\t\tthis.valueOf = 2;\r\n\t\t\tif (typeof onSuccess === \"function\") {\r\n\t\t\t\t_zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);\r\n\t\t\t\treturn null;\r\n\t\t\t}\r\n\t\t\treturn _zip.compressToBuffer()\r\n\t\t}\r\n\t}\r\n};\r\n","var Utils = require(\"../util\"),\r\n Constants = Utils.Constants;\r\n\r\n/* The central directory file header */\r\nmodule.exports = function () {\r\n var _verMade = 0x14,\r\n _version = 0x0A,\r\n _flags = 0,\r\n _method = 0,\r\n _time = 0,\r\n _crc = 0,\r\n _compressedSize = 0,\r\n _size = 0,\r\n _fnameLen = 0,\r\n _extraLen = 0,\r\n\r\n _comLen = 0,\r\n _diskStart = 0,\r\n _inattr = 0,\r\n _attr = 0,\r\n _offset = 0;\r\n\r\n switch(process.platform){\r\n case 'win32':\r\n _verMade |= 0x0A00;\r\n case 'darwin':\r\n _verMade |= 0x1300;\r\n default:\r\n _verMade |= 0x0300;\r\n }\r\n\r\n var _dataHeader = {};\r\n\r\n function setTime(val) {\r\n val = new Date(val);\r\n _time = (val.getFullYear() - 1980 & 0x7f) << 25 // b09-16 years from 1980\r\n | (val.getMonth() + 1) << 21 // b05-08 month\r\n | val.getDate() << 16 // b00-04 hour\r\n\r\n // 2 bytes time\r\n | val.getHours() << 11 // b11-15 hour\r\n | val.getMinutes() << 5 // b05-10 minute\r\n | val.getSeconds() >> 1; // b00-04 seconds divided by 2\r\n }\r\n\r\n setTime(+new Date());\r\n\r\n return {\r\n get made () { return _verMade; },\r\n set made (val) { _verMade = val; },\r\n\r\n get version () { return _version; },\r\n set version (val) { _version = val },\r\n\r\n get flags () { return _flags },\r\n set flags (val) { _flags = val; },\r\n\r\n get method () { return _method; },\r\n set method (val) {\r\n switch (val){\r\n case Constants.STORED:\r\n this.version = 10;\r\n case Constants.DEFLATED:\r\n default:\r\n this.version = 20;\r\n }\r\n _method = val;\r\n },\r\n\r\n get time () { return new Date(\r\n ((_time >> 25) & 0x7f) + 1980,\r\n ((_time >> 21) & 0x0f) - 1,\r\n (_time >> 16) & 0x1f,\r\n (_time >> 11) & 0x1f,\r\n (_time >> 5) & 0x3f,\r\n (_time & 0x1f) << 1\r\n );\r\n },\r\n set time (val) {\r\n setTime(val);\r\n },\r\n\r\n get crc () { return _crc; },\r\n set crc (val) { _crc = val; },\r\n\r\n get compressedSize () { return _compressedSize; },\r\n set compressedSize (val) { _compressedSize = val; },\r\n\r\n get size () { return _size; },\r\n set size (val) { _size = val; },\r\n\r\n get fileNameLength () { return _fnameLen; },\r\n set fileNameLength (val) { _fnameLen = val; },\r\n\r\n get extraLength () { 
return _extraLen },\r\n set extraLength (val) { _extraLen = val; },\r\n\r\n get commentLength () { return _comLen },\r\n set commentLength (val) { _comLen = val },\r\n\r\n get diskNumStart () { return _diskStart },\r\n set diskNumStart (val) { _diskStart = val },\r\n\r\n get inAttr () { return _inattr },\r\n set inAttr (val) { _inattr = val },\r\n\r\n get attr () { return _attr },\r\n set attr (val) { _attr = val },\r\n\r\n get offset () { return _offset },\r\n set offset (val) { _offset = val },\r\n\r\n get encripted () { return (_flags & 1) === 1 },\r\n\r\n get entryHeaderSize () {\r\n return Constants.CENHDR + _fnameLen + _extraLen + _comLen;\r\n },\r\n\r\n get realDataOffset () {\r\n return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;\r\n },\r\n\r\n get dataHeader () {\r\n return _dataHeader;\r\n },\r\n\r\n loadDataHeaderFromBinary : function(/*Buffer*/input) {\r\n var data = input.slice(_offset, _offset + Constants.LOCHDR);\r\n // 30 bytes and should start with \"PK\\003\\004\"\r\n if (data.readUInt32LE(0) !== Constants.LOCSIG) {\r\n throw new Error(Utils.Errors.INVALID_LOC);\r\n }\r\n _dataHeader = {\r\n // version needed to extract\r\n version : data.readUInt16LE(Constants.LOCVER),\r\n // general purpose bit flag\r\n flags : data.readUInt16LE(Constants.LOCFLG),\r\n // compression method\r\n method : data.readUInt16LE(Constants.LOCHOW),\r\n // modification time (2 bytes time, 2 bytes date)\r\n time : data.readUInt32LE(Constants.LOCTIM),\r\n // uncompressed file crc-32 value\r\n crc : data.readUInt32LE(Constants.LOCCRC),\r\n // compressed size\r\n compressedSize : data.readUInt32LE(Constants.LOCSIZ),\r\n // uncompressed size\r\n size : data.readUInt32LE(Constants.LOCLEN),\r\n // filename length\r\n fnameLen : data.readUInt16LE(Constants.LOCNAM),\r\n // extra field length\r\n extraLen : data.readUInt16LE(Constants.LOCEXT)\r\n }\r\n },\r\n\r\n loadFromBinary : function(/*Buffer*/data) {\r\n // data should be 46 bytes and start with \"PK 01 02\"\r\n if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {\r\n throw new Error(Utils.Errors.INVALID_CEN);\r\n }\r\n // version made by\r\n _verMade = data.readUInt16LE(Constants.CENVEM);\r\n // version needed to extract\r\n _version = data.readUInt16LE(Constants.CENVER);\r\n // encrypt, decrypt flags\r\n _flags = data.readUInt16LE(Constants.CENFLG);\r\n // compression method\r\n _method = data.readUInt16LE(Constants.CENHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n _time = data.readUInt32LE(Constants.CENTIM);\r\n // uncompressed file crc-32 value\r\n _crc = data.readUInt32LE(Constants.CENCRC);\r\n // compressed size\r\n _compressedSize = data.readUInt32LE(Constants.CENSIZ);\r\n // uncompressed size\r\n _size = data.readUInt32LE(Constants.CENLEN);\r\n // filename length\r\n _fnameLen = data.readUInt16LE(Constants.CENNAM);\r\n // extra field length\r\n _extraLen = data.readUInt16LE(Constants.CENEXT);\r\n // file comment length\r\n _comLen = data.readUInt16LE(Constants.CENCOM);\r\n // volume number start\r\n _diskStart = data.readUInt16LE(Constants.CENDSK);\r\n // internal file attributes\r\n _inattr = data.readUInt16LE(Constants.CENATT);\r\n // external file attributes\r\n _attr = data.readUInt32LE(Constants.CENATX);\r\n // LOC header offset\r\n _offset = data.readUInt32LE(Constants.CENOFF);\r\n },\r\n\r\n dataHeaderToBinary : function() {\r\n // LOC header size (30 bytes)\r\n var data = Buffer.alloc(Constants.LOCHDR);\r\n // \"PK\\003\\004\"\r\n 
data.writeUInt32LE(Constants.LOCSIG, 0);\r\n // version needed to extract\r\n data.writeUInt16LE(_version, Constants.LOCVER);\r\n // general purpose bit flag\r\n data.writeUInt16LE(_flags, Constants.LOCFLG);\r\n // compression method\r\n data.writeUInt16LE(_method, Constants.LOCHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n data.writeUInt32LE(_time, Constants.LOCTIM);\r\n // uncompressed file crc-32 value\r\n data.writeUInt32LE(_crc, Constants.LOCCRC);\r\n // compressed size\r\n data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);\r\n // uncompressed size\r\n data.writeUInt32LE(_size, Constants.LOCLEN);\r\n // filename length\r\n data.writeUInt16LE(_fnameLen, Constants.LOCNAM);\r\n // extra field length\r\n data.writeUInt16LE(_extraLen, Constants.LOCEXT);\r\n return data;\r\n },\r\n\r\n entryHeaderToBinary : function() {\r\n // CEN header size (46 bytes)\r\n var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);\r\n // \"PK\\001\\002\"\r\n data.writeUInt32LE(Constants.CENSIG, 0);\r\n // version made by\r\n data.writeUInt16LE(_verMade, Constants.CENVEM);\r\n // version needed to extract\r\n data.writeUInt16LE(_version, Constants.CENVER);\r\n // encrypt, decrypt flags\r\n data.writeUInt16LE(_flags, Constants.CENFLG);\r\n // compression method\r\n data.writeUInt16LE(_method, Constants.CENHOW);\r\n // modification time (2 bytes time, 2 bytes date)\r\n data.writeUInt32LE(_time, Constants.CENTIM);\r\n // uncompressed file crc-32 value\r\n data.writeUInt32LE(_crc, Constants.CENCRC);\r\n // compressed size\r\n data.writeUInt32LE(_compressedSize, Constants.CENSIZ);\r\n // uncompressed size\r\n data.writeUInt32LE(_size, Constants.CENLEN);\r\n // filename length\r\n data.writeUInt16LE(_fnameLen, Constants.CENNAM);\r\n // extra field length\r\n data.writeUInt16LE(_extraLen, Constants.CENEXT);\r\n // file comment length\r\n data.writeUInt16LE(_comLen, Constants.CENCOM);\r\n // volume number start\r\n data.writeUInt16LE(_diskStart, Constants.CENDSK);\r\n // internal file attributes\r\n data.writeUInt16LE(_inattr, Constants.CENATT);\r\n // external file attributes\r\n data.writeUInt32LE(_attr, Constants.CENATX);\r\n // LOC header offset\r\n data.writeUInt32LE(_offset, Constants.CENOFF);\r\n // fill all with\r\n data.fill(0x00, Constants.CENHDR);\r\n return data;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"made\" : ' + _verMade + \",\\n\" +\r\n '\\t\"version\" : ' + _version + \",\\n\" +\r\n '\\t\"flags\" : ' + _flags + \",\\n\" +\r\n '\\t\"method\" : ' + Utils.methodToString(_method) + \",\\n\" +\r\n '\\t\"time\" : ' + this.time + \",\\n\" +\r\n '\\t\"crc\" : 0x' + _crc.toString(16).toUpperCase() + \",\\n\" +\r\n '\\t\"compressedSize\" : ' + _compressedSize + \" bytes,\\n\" +\r\n '\\t\"size\" : ' + _size + \" bytes,\\n\" +\r\n '\\t\"fileNameLength\" : ' + _fnameLen + \",\\n\" +\r\n '\\t\"extraLength\" : ' + _extraLen + \" bytes,\\n\" +\r\n '\\t\"commentLength\" : ' + _comLen + \" bytes,\\n\" +\r\n '\\t\"diskNumStart\" : ' + _diskStart + \",\\n\" +\r\n '\\t\"inAttr\" : ' + _inattr + \",\\n\" +\r\n '\\t\"attr\" : ' + _attr + \",\\n\" +\r\n '\\t\"offset\" : ' + _offset + \",\\n\" +\r\n '\\t\"entryHeaderSize\" : ' + (Constants.CENHDR + _fnameLen + _extraLen + _comLen) + \" bytes\\n\" +\r\n '}';\r\n }\r\n }\r\n};\r\n","exports.EntryHeader = require(\"./entryHeader\");\r\nexports.MainHeader = require(\"./mainHeader\");\r\n","var Utils = require(\"../util\"),\r\n Constants = Utils.Constants;\r\n\r\n/* The entries in the end of central directory 
*/\r\nmodule.exports = function () {\r\n var _volumeEntries = 0,\r\n _totalEntries = 0,\r\n _size = 0,\r\n _offset = 0,\r\n _commentLength = 0;\r\n\r\n return {\r\n get diskEntries () { return _volumeEntries },\r\n set diskEntries (/*Number*/val) { _volumeEntries = _totalEntries = val; },\r\n\r\n get totalEntries () { return _totalEntries },\r\n set totalEntries (/*Number*/val) { _totalEntries = _volumeEntries = val; },\r\n\r\n get size () { return _size },\r\n set size (/*Number*/val) { _size = val; },\r\n\r\n get offset () { return _offset },\r\n set offset (/*Number*/val) { _offset = val; },\r\n\r\n get commentLength () { return _commentLength },\r\n set commentLength (/*Number*/val) { _commentLength = val; },\r\n\r\n get mainHeaderSize () {\r\n return Constants.ENDHDR + _commentLength;\r\n },\r\n\r\n loadFromBinary : function(/*Buffer*/data) {\r\n // data should be 22 bytes and start with \"PK 05 06\"\r\n // or be 56+ bytes and start with \"PK 06 06\" for Zip64\r\n if ((data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&\r\n (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)) {\r\n\r\n throw new Error(Utils.Errors.INVALID_END);\r\n }\r\n\r\n if (data.readUInt32LE(0) === Constants.ENDSIG) {\r\n // number of entries on this volume\r\n _volumeEntries = data.readUInt16LE(Constants.ENDSUB);\r\n // total number of entries\r\n _totalEntries = data.readUInt16LE(Constants.ENDTOT);\r\n // central directory size in bytes\r\n _size = data.readUInt32LE(Constants.ENDSIZ);\r\n // offset of first CEN header\r\n _offset = data.readUInt32LE(Constants.ENDOFF);\r\n // zip file comment length\r\n _commentLength = data.readUInt16LE(Constants.ENDCOM);\r\n } else {\r\n // number of entries on this volume\r\n _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);\r\n // total number of entries\r\n _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);\r\n // central directory size in bytes\r\n _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);\r\n // offset of first CEN header\r\n _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);\r\n\r\n _commentLength = 0;\r\n }\r\n\r\n },\r\n\r\n toBinary : function() {\r\n var b = Buffer.alloc(Constants.ENDHDR + _commentLength);\r\n // \"PK 05 06\" signature\r\n b.writeUInt32LE(Constants.ENDSIG, 0);\r\n b.writeUInt32LE(0, 4);\r\n // number of entries on this volume\r\n b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);\r\n // total number of entries\r\n b.writeUInt16LE(_totalEntries, Constants.ENDTOT);\r\n // central directory size in bytes\r\n b.writeUInt32LE(_size, Constants.ENDSIZ);\r\n // offset of first CEN header\r\n b.writeUInt32LE(_offset, Constants.ENDOFF);\r\n // zip file comment length\r\n b.writeUInt16LE(_commentLength, Constants.ENDCOM);\r\n // fill comment memory with spaces so no garbage is left there\r\n b.fill(\" \", Constants.ENDHDR);\r\n\r\n return b;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"diskEntries\" : ' + _volumeEntries + \",\\n\" +\r\n '\\t\"totalEntries\" : ' + _totalEntries + \",\\n\" +\r\n '\\t\"size\" : ' + _size + \" bytes,\\n\" +\r\n '\\t\"offset\" : 0x' + _offset.toString(16).toUpperCase() + \",\\n\" +\r\n '\\t\"commentLength\" : 0x' + _commentLength + \"\\n\" +\r\n '}';\r\n }\r\n }\r\n};","module.exports = function (/*Buffer*/inbuf) {\r\n\r\n var zlib = require(\"zlib\");\r\n \r\n var opts = {chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024};\r\n \r\n return {\r\n deflate: function () {\r\n return 
zlib.deflateRawSync(inbuf, opts);\r\n },\r\n\r\n deflateAsync: function (/*Function*/callback) {\r\n var tmp = zlib.createDeflateRaw(opts), parts = [], total = 0;\r\n tmp.on('data', function (data) {\r\n parts.push(data);\r\n total += data.length;\r\n });\r\n tmp.on('end', function () {\r\n var buf = Buffer.alloc(total), written = 0;\r\n buf.fill(0);\r\n for (var i = 0; i < parts.length; i++) {\r\n var part = parts[i];\r\n part.copy(buf, written);\r\n written += part.length;\r\n }\r\n callback && callback(buf);\r\n });\r\n tmp.end(inbuf);\r\n }\r\n }\r\n};\r\n","exports.Deflater = require(\"./deflater\");\r\nexports.Inflater = require(\"./inflater\");\r\nexports.ZipCrypto = require(\"./zipcrypto\");","module.exports = function (/*Buffer*/inbuf) {\r\n\r\n var zlib = require(\"zlib\");\r\n\r\n return {\r\n inflate: function () {\r\n return zlib.inflateRawSync(inbuf);\r\n },\r\n\r\n inflateAsync: function (/*Function*/callback) {\r\n var tmp = zlib.createInflateRaw(), parts = [], total = 0;\r\n tmp.on('data', function (data) {\r\n parts.push(data);\r\n total += data.length;\r\n });\r\n tmp.on('end', function () {\r\n var buf = Buffer.alloc(total), written = 0;\r\n buf.fill(0);\r\n for (var i = 0; i < parts.length; i++) {\r\n var part = parts[i];\r\n part.copy(buf, written);\r\n written += part.length;\r\n }\r\n callback && callback(buf);\r\n });\r\n tmp.end(inbuf);\r\n }\r\n }\r\n};\r\n","// generate CRC32 lookup table\r\nconst crctable = (new Uint32Array(256)).map((t,crc)=>{\r\n for(let j=0;j<8;j++){\r\n if (0 !== (crc & 1)){\r\n crc = (crc >>> 1) ^ 0xEDB88320\r\n }else{\r\n crc >>>= 1 \r\n }\r\n }\r\n return crc>>>0;\r\n});\r\n\r\nfunction make_decrypter(/*Buffer*/pwd){\r\n // C-style uInt32 Multiply\r\n const uMul = (a,b) => Math.imul(a, b) >>> 0;\r\n // Initialize keys with default values\r\n const keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);\r\n // crc32 byte update \r\n const crc32update = (pCrc32, bval) => {\r\n return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);\r\n }\r\n // update keys with byteValues\r\n const updateKeys = (byteValue) => {\r\n keys[0] = crc32update(keys[0], byteValue);\r\n keys[1] += keys[0] & 0xff; \r\n keys[1] = uMul(keys[1], 134775813) + 1;\r\n keys[2] = crc32update(keys[2], keys[1] >>> 24);\r\n }\r\n\r\n // 1. Stage initialize key\r\n const pass = (Buffer.isBuffer(pwd)) ? 
pwd : Buffer.from(pwd);\r\n for(let i=0; i< pass.length; i++){\r\n updateKeys(pass[i]);\r\n }\r\n\r\n // return decrypter function\r\n return function (/*Buffer*/data){\r\n if (!Buffer.isBuffer(data)){\r\n throw 'decrypter needs Buffer'\r\n }\r\n // result - we create new Buffer for results\r\n const result = Buffer.alloc(data.length);\r\n let pos = 0;\r\n // process input data\r\n for(let c of data){\r\n const k = (keys[2] | 2) >>> 0; // key\r\n c ^= (uMul(k, k^1) >> 8) & 0xff; // decode\r\n result[pos++] = c; // Save Value\r\n updateKeys(c); // update keys with decoded byte\r\n }\r\n return result;\r\n }\r\n}\r\n\r\nfunction decrypt(/*Buffer*/ data, /*Object*/header, /*String, Buffer*/ pwd){\r\n if (!data || !Buffer.isBuffer(data) || data.length < 12) {\r\n return Buffer.alloc(0);\r\n }\r\n \r\n // We Initialize and generate decrypting function\r\n const decrypter = make_decrypter(pwd);\r\n\r\n // check - for testing password\r\n const check = header.crc >>> 24;\r\n // decrypt salt what is always 12 bytes and is a part of file content\r\n const testbyte = decrypter(data.slice(0, 12))[11];\r\n\r\n // does password meet expectations\r\n if (check !== testbyte){\r\n throw 'ADM-ZIP: Wrong Password';\r\n }\r\n\r\n // decode content\r\n return decrypter(data.slice(12));\r\n}\r\n\r\nmodule.exports = {decrypt};\r\n","module.exports = {\r\n /* The local file header */\r\n LOCHDR : 30, // LOC header size\r\n LOCSIG : 0x04034b50, // \"PK\\003\\004\"\r\n LOCVER : 4,\t// version needed to extract\r\n LOCFLG : 6, // general purpose bit flag\r\n LOCHOW : 8, // compression method\r\n LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)\r\n LOCCRC : 14, // uncompressed file crc-32 value\r\n LOCSIZ : 18, // compressed size\r\n LOCLEN : 22, // uncompressed size\r\n LOCNAM : 26, // filename length\r\n LOCEXT : 28, // extra field length\r\n\r\n /* The Data descriptor */\r\n EXTSIG : 0x08074b50, // \"PK\\007\\008\"\r\n EXTHDR : 16, // EXT header size\r\n EXTCRC : 4, // uncompressed file crc-32 value\r\n EXTSIZ : 8, // compressed size\r\n EXTLEN : 12, // uncompressed size\r\n\r\n /* The central directory file header */\r\n CENHDR : 46, // CEN header size\r\n CENSIG : 0x02014b50, // \"PK\\001\\002\"\r\n CENVEM : 4, // version made by\r\n CENVER : 6, // version needed to extract\r\n CENFLG : 8, // encrypt, decrypt flags\r\n CENHOW : 10, // compression method\r\n CENTIM : 12, // modification time (2 bytes time, 2 bytes date)\r\n CENCRC : 16, // uncompressed file crc-32 value\r\n CENSIZ : 20, // compressed size\r\n CENLEN : 24, // uncompressed size\r\n CENNAM : 28, // filename length\r\n CENEXT : 30, // extra field length\r\n CENCOM : 32, // file comment length\r\n CENDSK : 34, // volume number start\r\n CENATT : 36, // internal file attributes\r\n CENATX : 38, // external file attributes (host system dependent)\r\n CENOFF : 42, // LOC header offset\r\n\r\n /* The entries in the end of central directory */\r\n ENDHDR : 22, // END header size\r\n ENDSIG : 0x06054b50, // \"PK\\005\\006\"\r\n ENDSUB : 8, // number of entries on this disk\r\n ENDTOT : 10, // total number of entries\r\n ENDSIZ : 12, // central directory size in bytes\r\n ENDOFF : 16, // offset of first CEN header\r\n ENDCOM : 20, // zip file comment length\r\n\r\n END64HDR : 20, // zip64 END header size\r\n END64SIG : 0x07064b50, // zip64 Locator signature, \"PK\\006\\007\"\r\n END64START : 4, // number of the disk with the start of the zip64\r\n END64OFF : 8, // relative offset of the zip64 end of central directory\r\n END64NUMDISKS : 16, // 
total number of disks\r\n\r\n ZIP64SIG : 0x06064b50, // zip64 signature, \"PK\\006\\006\"\r\n ZIP64HDR : 56, // zip64 record minimum size\r\n ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE\r\n ZIP64SIZE : 4, // zip64 size of the central directory record\r\n ZIP64VEM : 12, // zip64 version made by\r\n ZIP64VER : 14, // zip64 version needed to extract\r\n ZIP64DSK : 16, // zip64 number of this disk\r\n ZIP64DSKDIR : 20, // number of the disk with the start of the record directory\r\n ZIP64SUB : 24, // number of entries on this disk\r\n ZIP64TOT : 32, // total number of entries\r\n ZIP64SIZB : 40, // zip64 central directory size in bytes\r\n ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number\r\n ZIP64EXTRA : 56, // extensible data sector\r\n\r\n /* Compression methods */\r\n STORED : 0, // no compression\r\n SHRUNK : 1, // shrunk\r\n REDUCED1 : 2, // reduced with compression factor 1\r\n REDUCED2 : 3, // reduced with compression factor 2\r\n REDUCED3 : 4, // reduced with compression factor 3\r\n REDUCED4 : 5, // reduced with compression factor 4\r\n IMPLODED : 6, // imploded\r\n // 7 reserved\r\n DEFLATED : 8, // deflated\r\n ENHANCED_DEFLATED: 9, // enhanced deflated\r\n PKWARE : 10,// PKWare DCL imploded\r\n // 11 reserved\r\n BZIP2 : 12, // compressed using BZIP2\r\n // 13 reserved\r\n LZMA : 14, // LZMA\r\n // 15-17 reserved\r\n IBM_TERSE : 18, // compressed using IBM TERSE\r\n IBM_LZ77 : 19, //IBM LZ77 z\r\n\r\n /* General purpose bit flag */\r\n FLG_ENC : 0, // encripted file\r\n FLG_COMP1 : 1, // compression option\r\n FLG_COMP2 : 2, // compression option\r\n FLG_DESC : 4, // data descriptor\r\n FLG_ENH : 8, // enhanced deflation\r\n FLG_STR : 16, // strong encryption\r\n FLG_LNG : 1024, // language encoding\r\n FLG_MSK : 4096, // mask header values\r\n\r\n /* Load type */\r\n FILE : 0,\r\n BUFFER : 1,\r\n NONE : 2,\r\n\r\n /* 4.5 Extensible data fields */\r\n EF_ID : 0,\r\n EF_SIZE : 2,\r\n\r\n /* Header IDs */\r\n ID_ZIP64 : 0x0001,\r\n ID_AVINFO : 0x0007,\r\n ID_PFS : 0x0008,\r\n ID_OS2 : 0x0009,\r\n ID_NTFS : 0x000a,\r\n ID_OPENVMS : 0x000c,\r\n ID_UNIX : 0x000d,\r\n ID_FORK : 0x000e,\r\n ID_PATCH : 0x000f,\r\n ID_X509_PKCS7 : 0x0014,\r\n ID_X509_CERTID_F : 0x0015,\r\n ID_X509_CERTID_C : 0x0016,\r\n ID_STRONGENC : 0x0017,\r\n ID_RECORD_MGT : 0x0018,\r\n ID_X509_PKCS7_RL : 0x0019,\r\n ID_IBM1 : 0x0065,\r\n ID_IBM2 : 0x0066,\r\n ID_POSZIP : 0x4690,\r\n\r\n EF_ZIP64_OR_32 : 0xffffffff,\r\n EF_ZIP64_OR_16 : 0xffff,\r\n EF_ZIP64_SUNCOMP : 0,\r\n EF_ZIP64_SCOMP : 8,\r\n EF_ZIP64_RHO : 16,\r\n EF_ZIP64_DSN : 24\r\n};\r\n","module.exports = {\r\n /* Header error messages */\r\n \"INVALID_LOC\" : \"Invalid LOC header (bad signature)\",\r\n \"INVALID_CEN\" : \"Invalid CEN header (bad signature)\",\r\n \"INVALID_END\" : \"Invalid END header (bad signature)\",\r\n\r\n /* ZipEntry error messages*/\r\n \"NO_DATA\" : \"Nothing to decompress\",\r\n \"BAD_CRC\" : \"CRC32 checksum failed\",\r\n \"FILE_IN_THE_WAY\" : \"There is a file in the way: %s\",\r\n \"UNKNOWN_METHOD\" : \"Invalid/unsupported compression method\",\r\n\r\n /* Inflater error messages */\r\n \"AVAIL_DATA\" : \"inflate::Available inflate data did not terminate\",\r\n \"INVALID_DISTANCE\" : \"inflate::Invalid literal/length or distance code in fixed or dynamic block\",\r\n \"TO_MANY_CODES\" : \"inflate::Dynamic block code description: too many length or distance codes\",\r\n \"INVALID_REPEAT_LEN\" : \"inflate::Dynamic block code 
description: repeat more than specified lengths\",\r\n \"INVALID_REPEAT_FIRST\" : \"inflate::Dynamic block code description: repeat lengths with no first length\",\r\n \"INCOMPLETE_CODES\" : \"inflate::Dynamic block code description: code lengths codes incomplete\",\r\n \"INVALID_DYN_DISTANCE\": \"inflate::Dynamic block code description: invalid distance code lengths\",\r\n \"INVALID_CODES_LEN\": \"inflate::Dynamic block code description: invalid literal/length code lengths\",\r\n \"INVALID_STORE_BLOCK\" : \"inflate::Stored block length did not match one's complement\",\r\n \"INVALID_BLOCK_TYPE\" : \"inflate::Invalid block type (type == 3)\",\r\n\r\n /* ADM-ZIP error messages */\r\n \"CANT_EXTRACT_FILE\" : \"Could not extract the file\",\r\n \"CANT_OVERRIDE\" : \"Target file already exists\",\r\n \"NO_ZIP\" : \"No zip file was loaded\",\r\n \"NO_ENTRY\" : \"Entry doesn't exist\",\r\n \"DIRECTORY_CONTENT_ERROR\" : \"A directory cannot have content\",\r\n \"FILE_NOT_FOUND\" : \"File not found: %s\",\r\n \"NOT_IMPLEMENTED\" : \"Not implemented\",\r\n \"INVALID_FILENAME\" : \"Invalid filename\",\r\n \"INVALID_FORMAT\" : \"Invalid or unsupported zip format. No END header found\"\r\n};","var fs = require(\"./fileSystem\").require(),\r\n pth = require(\"path\");\r\n\t\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nmodule.exports = function(/*String*/path) {\r\n\r\n var _path = path || \"\",\r\n _permissions = 0,\r\n _obj = newAttr(),\r\n _stat = null;\r\n\r\n function newAttr() {\r\n return {\r\n directory : false,\r\n readonly : false,\r\n hidden : false,\r\n executable : false,\r\n mtime : 0,\r\n atime : 0\r\n }\r\n }\r\n\r\n if (_path && fs.existsSync(_path)) {\r\n _stat = fs.statSync(_path);\r\n _obj.directory = _stat.isDirectory();\r\n _obj.mtime = _stat.mtime;\r\n _obj.atime = _stat.atime;\r\n _obj.executable = (0o111 & _stat.mode) != 0; // file is executable who ever har right not just owner\r\n _obj.readonly = (0o200 & _stat.mode) == 0; // readonly if owner has no write right\r\n _obj.hidden = pth.basename(_path)[0] === \".\";\r\n } else {\r\n console.warn(\"Invalid path: \" + _path)\r\n }\r\n\r\n return {\r\n\r\n get directory () {\r\n return _obj.directory;\r\n },\r\n\r\n get readOnly () {\r\n return _obj.readonly;\r\n },\r\n\r\n get hidden () {\r\n return _obj.hidden;\r\n },\r\n\r\n get mtime () {\r\n return _obj.mtime;\r\n },\r\n\r\n get atime () {\r\n return _obj.atime;\r\n },\r\n\r\n\r\n get executable () {\r\n return _obj.executable;\r\n },\r\n\r\n decodeAttributes : function(val) {\r\n\r\n },\r\n\r\n encodeAttributes : function (val) {\r\n\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"path\" : \"' + _path + \",\\n\" +\r\n '\\t\"isDirectory\" : ' + _obj.directory + \",\\n\" +\r\n '\\t\"isReadOnly\" : ' + _obj.readonly + \",\\n\" +\r\n '\\t\"isHidden\" : ' + _obj.hidden + \",\\n\" +\r\n '\\t\"isExecutable\" : ' + _obj.executable + \",\\n\" +\r\n '\\t\"mTime\" : ' + _obj.mtime + \"\\n\" +\r\n '\\t\"aTime\" : ' + _obj.atime + \"\\n\" +\r\n '}';\r\n }\r\n }\r\n\r\n};\r\n","exports.require = function() {\r\n var fs = require(\"fs\");\r\n if (process && process.versions && process.versions['electron']) {\r\n\t try {\r\n\t originalFs = require(\"original-fs\");\r\n\t if (Object.keys(originalFs).length > 0) {\r\n\t fs = originalFs;\r\n }\r\n\t } catch (e) {}\r\n }\r\n return fs\r\n};\r\n","module.exports = require(\"./utils\");\r\nmodule.exports.FileSystem = require(\"./fileSystem\");\r\nmodule.exports.Constants = 
require(\"./constants\");\r\nmodule.exports.Errors = require(\"./errors\");\r\nmodule.exports.FileAttr = require(\"./fattr\");","var fs = require(\"./fileSystem\").require(),\r\n pth = require('path');\r\n\r\nfs.existsSync = fs.existsSync || pth.existsSync;\r\n\r\nmodule.exports = (function() {\r\n\r\n var crcTable = [],\r\n Constants = require('./constants'),\r\n Errors = require('./errors'),\r\n\r\n PATH_SEPARATOR = pth.sep;\r\n\r\n\r\n function mkdirSync(/*String*/path) {\r\n var resolvedPath = path.split(PATH_SEPARATOR)[0];\r\n path.split(PATH_SEPARATOR).forEach(function(name) {\r\n if (!name || name.substr(-1,1) === \":\") return;\r\n resolvedPath += PATH_SEPARATOR + name;\r\n var stat;\r\n try {\r\n stat = fs.statSync(resolvedPath);\r\n } catch (e) {\r\n fs.mkdirSync(resolvedPath);\r\n }\r\n if (stat && stat.isFile())\r\n throw Errors.FILE_IN_THE_WAY.replace(\"%s\", resolvedPath);\r\n });\r\n }\r\n\r\n function findSync(/*String*/dir, /*RegExp*/pattern, /*Boolean*/recoursive) {\r\n if (typeof pattern === 'boolean') {\r\n recoursive = pattern;\r\n pattern = undefined;\r\n }\r\n var files = [];\r\n fs.readdirSync(dir).forEach(function(file) {\r\n var path = pth.join(dir, file);\r\n\r\n if (fs.statSync(path).isDirectory() && recoursive)\r\n files = files.concat(findSync(path, pattern, recoursive));\r\n\r\n if (!pattern || pattern.test(path)) {\r\n files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : \"\"));\r\n }\r\n\r\n });\r\n return files;\r\n }\r\n\r\n function readBigUInt64LE(/*Buffer*/buffer, /*int*/index) {\r\n var slice = Buffer.from(buffer.slice(index, index + 8));\r\n slice.swap64();\r\n\r\n return parseInt(`0x${ slice.toString('hex') }`);\r\n }\r\n\r\n return {\r\n makeDir : function(/*String*/path) {\r\n mkdirSync(path);\r\n },\r\n\r\n crc32 : function(buf) {\r\n if (typeof buf === 'string') {\r\n buf = Buffer.alloc(buf.length, buf);\r\n }\r\n var b = Buffer.alloc(4);\r\n if (!crcTable.length) {\r\n for (var n = 0; n < 256; n++) {\r\n var c = n;\r\n for (var k = 8; --k >= 0;) //\r\n if ((c & 1) !== 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; }\r\n if (c < 0) {\r\n b.writeInt32LE(c, 0);\r\n c = b.readUInt32LE(0);\r\n }\r\n crcTable[n] = c;\r\n }\r\n }\r\n var crc = 0, off = 0, len = buf.length, c1 = ~crc;\r\n while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8);\r\n crc = ~c1;\r\n b.writeInt32LE(crc & 0xffffffff, 0);\r\n return b.readUInt32LE(0);\r\n },\r\n\r\n methodToString : function(/*Number*/method) {\r\n switch (method) {\r\n case Constants.STORED:\r\n return 'STORED (' + method + ')';\r\n case Constants.DEFLATED:\r\n return 'DEFLATED (' + method + ')';\r\n default:\r\n return 'UNSUPPORTED (' + method + ')';\r\n }\r\n\r\n },\r\n\r\n writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) {\r\n if (fs.existsSync(path)) {\r\n if (!overwrite)\r\n return false; // cannot overwrite\r\n\r\n var stat = fs.statSync(path);\r\n if (stat.isDirectory()) {\r\n return false;\r\n }\r\n }\r\n var folder = pth.dirname(path);\r\n if (!fs.existsSync(folder)) {\r\n mkdirSync(folder);\r\n }\r\n\r\n var fd;\r\n try {\r\n fd = fs.openSync(path, 'w', 438); // 0666\r\n } catch(e) {\r\n fs.chmodSync(path, 438);\r\n fd = fs.openSync(path, 'w', 438);\r\n }\r\n if (fd) {\r\n try {\r\n fs.writeSync(fd, content, 0, content.length, 0);\r\n }\r\n catch (e){\r\n throw e;\r\n }\r\n finally {\r\n fs.closeSync(fd);\r\n }\r\n }\r\n fs.chmodSync(path, attr || 438);\r\n return true;\r\n },\r\n\r\n 
writeFileToAsync : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr, /*Function*/callback) {\r\n if(typeof attr === 'function') {\r\n callback = attr;\r\n attr = undefined;\r\n }\r\n\r\n fs.exists(path, function(exists) {\r\n if(exists && !overwrite)\r\n return callback(false);\r\n\r\n fs.stat(path, function(err, stat) {\r\n if(exists &&stat.isDirectory()) {\r\n return callback(false);\r\n }\r\n\r\n var folder = pth.dirname(path);\r\n fs.exists(folder, function(exists) {\r\n if(!exists)\r\n mkdirSync(folder);\r\n\r\n fs.open(path, 'w', 438, function(err, fd) {\r\n if(err) {\r\n fs.chmod(path, 438, function() {\r\n fs.open(path, 'w', 438, function(err, fd) {\r\n fs.write(fd, content, 0, content.length, 0, function() {\r\n fs.close(fd, function() {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n });\r\n });\r\n });\r\n })\r\n } else {\r\n if(fd) {\r\n fs.write(fd, content, 0, content.length, 0, function() {\r\n fs.close(fd, function() {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n });\r\n });\r\n } else {\r\n fs.chmod(path, attr || 438, function() {\r\n callback(true);\r\n })\r\n }\r\n }\r\n });\r\n })\r\n })\r\n })\r\n },\r\n\r\n findFiles : function(/*String*/path) {\r\n return findSync(path, true);\r\n },\r\n\r\n getAttributes : function(/*String*/path) {\r\n\r\n },\r\n\r\n setAttributes : function(/*String*/path) {\r\n\r\n },\r\n\r\n toBuffer : function(input) {\r\n if (Buffer.isBuffer(input)) {\r\n return input;\r\n } else {\r\n if (input.length === 0) {\r\n return Buffer.alloc(0)\r\n }\r\n return Buffer.from(input, 'utf8');\r\n }\r\n },\r\n\r\n readBigUInt64LE,\r\n\r\n Constants : Constants,\r\n Errors : Errors\r\n }\r\n})();\r\n","var Utils = require(\"./util\"),\r\n Headers = require(\"./headers\"),\r\n Constants = Utils.Constants,\r\n Methods = require(\"./methods\");\r\n\r\nmodule.exports = function (/*Buffer*/input) {\r\n\r\n var _entryHeader = new Headers.EntryHeader(),\r\n _entryName = Buffer.alloc(0),\r\n _comment = Buffer.alloc(0),\r\n _isDirectory = false,\r\n uncompressedData = null,\r\n _extra = Buffer.alloc(0);\r\n\r\n function getCompressedDataFromZip() {\r\n if (!input || !Buffer.isBuffer(input)) {\r\n return Buffer.alloc(0);\r\n }\r\n _entryHeader.loadDataHeaderFromBinary(input);\r\n return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)\r\n }\r\n\r\n function crc32OK(data) {\r\n // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written\r\n if ((_entryHeader.flags & 0x8) !== 0x8) {\r\n if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {\r\n return false;\r\n }\r\n } else {\r\n // @TODO: load and check data descriptor header\r\n // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure\r\n // (optionally preceded by a 4-byte signature) immediately after the compressed data:\r\n }\r\n return true;\r\n }\r\n\r\n function decompress(/*Boolean*/async, /*Function*/callback, /*String, Buffer*/pass) {\r\n if(typeof callback === 'undefined' && typeof async === 'string') {\r\n pass=async;\r\n async=void 0;\r\n }\r\n if (_isDirectory) {\r\n if (async && callback) {\r\n callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.\r\n }\r\n return Buffer.alloc(0);\r\n }\r\n\r\n var compressedData = getCompressedDataFromZip();\r\n\r\n if (compressedData.length === 0) {\r\n // File is 
empty, nothing to decompress.\r\n if (async && callback) callback(compressedData);\r\n return compressedData;\r\n }\r\n\r\n if (_entryHeader.encripted){\r\n if ('string' !== typeof pass && !Buffer.isBuffer(pass)){\r\n throw new Error('ADM-ZIP: Incompatible password parameter');\r\n }\r\n compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);\r\n }\r\n\r\n var data = Buffer.alloc(_entryHeader.size);\r\n\r\n switch (_entryHeader.method) {\r\n case Utils.Constants.STORED:\r\n compressedData.copy(data);\r\n if (!crc32OK(data)) {\r\n if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error\r\n throw new Error(Utils.Errors.BAD_CRC);\r\n } else {//si added otherwise did not seem to return data.\r\n if (async && callback) callback(data);\r\n return data;\r\n }\r\n case Utils.Constants.DEFLATED:\r\n var inflater = new Methods.Inflater(compressedData);\r\n if (!async) {\r\n var result = inflater.inflate(data);\r\n result.copy(data, 0);\r\n if (!crc32OK(data)) {\r\n throw new Error(Utils.Errors.BAD_CRC + \" \" + _entryName.toString());\r\n }\r\n return data;\r\n } else {\r\n inflater.inflateAsync(function(result) {\r\n result.copy(data, 0);\r\n if (!crc32OK(data)) {\r\n if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error\r\n } else { //si added otherwise did not seem to return data.\r\n if (callback) callback(data);\r\n }\r\n })\r\n }\r\n break;\r\n default:\r\n if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);\r\n throw new Error(Utils.Errors.UNKNOWN_METHOD);\r\n }\r\n }\r\n\r\n function compress(/*Boolean*/async, /*Function*/callback) {\r\n if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {\r\n // no data set or the data wasn't changed to require recompression\r\n if (async && callback) callback(getCompressedDataFromZip());\r\n return getCompressedDataFromZip();\r\n }\r\n\r\n if (uncompressedData.length && !_isDirectory) {\r\n var compressedData;\r\n // Local file header\r\n switch (_entryHeader.method) {\r\n case Utils.Constants.STORED:\r\n _entryHeader.compressedSize = _entryHeader.size;\r\n\r\n compressedData = Buffer.alloc(uncompressedData.length);\r\n uncompressedData.copy(compressedData);\r\n\r\n if (async && callback) callback(compressedData);\r\n return compressedData;\r\n default:\r\n case Utils.Constants.DEFLATED:\r\n\r\n var deflater = new Methods.Deflater(uncompressedData);\r\n if (!async) {\r\n var deflated = deflater.deflate();\r\n _entryHeader.compressedSize = deflated.length;\r\n return deflated;\r\n } else {\r\n deflater.deflateAsync(function(data) {\r\n compressedData = Buffer.alloc(data.length);\r\n _entryHeader.compressedSize = data.length;\r\n data.copy(compressedData);\r\n callback && callback(compressedData);\r\n })\r\n }\r\n deflater = null;\r\n break;\r\n }\r\n } else {\r\n if (async && callback) {\r\n callback(Buffer.alloc(0));\r\n } else {\r\n return Buffer.alloc(0);\r\n }\r\n }\r\n }\r\n\r\n function readUInt64LE(buffer, offset) {\r\n return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset);\r\n }\r\n\r\n function parseExtra(data) {\r\n var offset = 0;\r\n var signature, size, part;\r\n while(offset= Constants.EF_ZIP64_SCOMP) {\r\n size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);\r\n if(_entryHeader.size === Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.size = size;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_RHO) {\r\n compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);\r\n if(_entryHeader.compressedSize === 
Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.compressedSize = compressedSize;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_DSN) {\r\n offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);\r\n if(_entryHeader.offset === Constants.EF_ZIP64_OR_32) {\r\n _entryHeader.offset = offset;\r\n }\r\n }\r\n if(data.length >= Constants.EF_ZIP64_DSN+4) {\r\n diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);\r\n if(_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {\r\n _entryHeader.diskNumStart = diskNumStart;\r\n }\r\n }\r\n }\r\n\r\n\r\n return {\r\n get entryName () { return _entryName.toString(); },\r\n get rawEntryName() { return _entryName; },\r\n set entryName (val) {\r\n _entryName = Utils.toBuffer(val);\r\n var lastChar = _entryName[_entryName.length - 1];\r\n _isDirectory = (lastChar === 47) || (lastChar === 92);\r\n _entryHeader.fileNameLength = _entryName.length;\r\n },\r\n\r\n get extra () { return _extra; },\r\n set extra (val) {\r\n _extra = val;\r\n _entryHeader.extraLength = val.length;\r\n parseExtra(val);\r\n },\r\n\r\n get comment () { return _comment.toString(); },\r\n set comment (val) {\r\n _comment = Utils.toBuffer(val);\r\n _entryHeader.commentLength = _comment.length;\r\n },\r\n\r\n get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split(\"/\").pop() : n.split(\"/\").pop(); },\r\n get isDirectory () { return _isDirectory },\r\n\r\n getCompressedData : function() {\r\n return compress(false, null)\r\n },\r\n\r\n getCompressedDataAsync : function(/*Function*/callback) {\r\n compress(true, callback)\r\n },\r\n\r\n setData : function(value) {\r\n uncompressedData = Utils.toBuffer(value);\r\n if (!_isDirectory && uncompressedData.length) {\r\n _entryHeader.size = uncompressedData.length;\r\n _entryHeader.method = Utils.Constants.DEFLATED;\r\n _entryHeader.crc = Utils.crc32(value);\r\n _entryHeader.changed = true;\r\n } else { // folders and blank files should be stored\r\n _entryHeader.method = Utils.Constants.STORED;\r\n }\r\n },\r\n\r\n getData : function(pass) {\r\n if (_entryHeader.changed) {\r\n return uncompressedData;\r\n } else {\r\n return decompress(false, null, pass);\r\n }\r\n },\r\n\r\n getDataAsync : function(/*Function*/callback, pass) {\r\n if (_entryHeader.changed) {\r\n callback(uncompressedData);\r\n } else {\r\n decompress(true, callback, pass);\r\n }\r\n },\r\n\r\n set attr(attr) { _entryHeader.attr = attr; },\r\n get attr() { return _entryHeader.attr; },\r\n\r\n set header(/*Buffer*/data) {\r\n _entryHeader.loadFromBinary(data);\r\n },\r\n\r\n get header() {\r\n return _entryHeader;\r\n },\r\n\r\n packHeader : function() {\r\n // 1. create header (buffer)\r\n var header = _entryHeader.entryHeaderToBinary();\r\n var addpos = Utils.Constants.CENHDR;\r\n // 2. add file name\r\n _entryName.copy(header, addpos);\r\n addpos += _entryName.length;\r\n // 3. add extra data\r\n if (_entryHeader.extraLength) {\r\n _extra.copy(header, addpos);\r\n addpos += _entryHeader.extraLength;\r\n }\r\n // 4. add file comment\r\n if (_entryHeader.commentLength) {\r\n _comment.copy(header, addpos);\r\n }\r\n return header;\r\n },\r\n\r\n toString : function() {\r\n return '{\\n' +\r\n '\\t\"entryName\" : \"' + _entryName.toString() + \"\\\",\\n\" +\r\n '\\t\"name\" : \"' + (_isDirectory ? 
_entryName.toString().replace(/\\/$/, '').split(\"/\").pop() : _entryName.toString().split(\"/\").pop()) + \"\\\",\\n\" +\r\n '\\t\"comment\" : \"' + _comment.toString() + \"\\\",\\n\" +\r\n '\\t\"isDirectory\" : ' + _isDirectory + \",\\n\" +\r\n '\\t\"header\" : ' + _entryHeader.toString().replace(/\\t/mg, \"\\t\\t\").replace(/}/mg, \"\\t}\") + \",\\n\" +\r\n '\\t\"compressedData\" : <' + (input && input.length + \" bytes buffer\" || \"null\") + \">\\n\" +\r\n '\\t\"data\" : <' + (uncompressedData && uncompressedData.length + \" bytes buffer\" || \"null\") + \">\\n\" +\r\n '}';\r\n }\r\n }\r\n};\r\n","var ZipEntry = require(\"./zipEntry\"),\r\n\tHeaders = require(\"./headers\"),\r\n\tUtils = require(\"./util\");\r\n\r\nmodule.exports = function (/*String|Buffer*/input, /*Number*/inputType) {\r\n\tvar entryList = [],\r\n\t\tentryTable = {},\r\n\t\t_comment = Buffer.alloc(0),\r\n\t\tfilename = \"\",\r\n\t\tfs = Utils.FileSystem.require(),\r\n\t\tinBuffer = null,\r\n\t\tmainHeader = new Headers.MainHeader(),\r\n\t\tloadedEntries = false;\r\n\r\n\tif (inputType === Utils.Constants.FILE) {\r\n\t\t// is a filename\r\n\t\tfilename = input;\r\n\t\tinBuffer = fs.readFileSync(filename);\r\n\t\treadMainHeader();\r\n\t} else if (inputType === Utils.Constants.BUFFER) {\r\n\t\t// is a memory buffer\r\n\t\tinBuffer = input;\r\n\t\treadMainHeader();\r\n\t} else {\r\n\t\t// none. is a new file\r\n\t\tloadedEntries = true;\r\n\t}\r\n\r\n\tfunction iterateEntries(callback) {\r\n\t\tconst totalEntries = mainHeader.diskEntries; // total number of entries\r\n\t\tlet index = mainHeader.offset; // offset of first CEN header\r\n\r\n\t\tfor (let i = 0; i < totalEntries; i++) {\r\n\t\t\tlet tmp = index;\r\n\t\t\tconst entry = new ZipEntry(inBuffer);\r\n\r\n\t\t\tentry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);\r\n\t\t\tentry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);\r\n\r\n\t\t\tindex += entry.header.entryHeaderSize;\r\n\r\n\t\t\tcallback(entry);\r\n\t\t}\r\n\t}\r\n\r\n\tfunction readEntries() {\r\n\t\tloadedEntries = true;\r\n\t\tentryTable = {};\r\n\t\tentryList = new Array(mainHeader.diskEntries); // total number of entries\r\n\t\tvar index = mainHeader.offset; // offset of first CEN header\r\n\t\tfor (var i = 0; i < entryList.length; i++) {\r\n\r\n\t\t\tvar tmp = index,\r\n\t\t\t\tentry = new ZipEntry(inBuffer);\r\n\t\t\tentry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);\r\n\r\n\t\t\tentry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);\r\n\r\n\t\t\tif (entry.header.extraLength) {\r\n\t\t\t\tentry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);\r\n\t\t\t}\r\n\r\n\t\t\tif (entry.header.commentLength)\r\n\t\t\t\tentry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);\r\n\r\n\t\t\tindex += entry.header.entryHeaderSize;\r\n\r\n\t\t\tentryList[i] = entry;\r\n\t\t\tentryTable[entry.entryName] = entry;\r\n\t\t}\r\n\t}\r\n\r\n\tfunction readMainHeader() {\r\n\t\tvar i = inBuffer.length - Utils.Constants.ENDHDR, // END header size\r\n\t\t\tmax = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length\r\n\t\t\tn = max,\r\n\t\t\tendStart = inBuffer.length,\r\n\t\t\tendOffset = -1, // Start offset of the END header\r\n\t\t\tcommentEnd = 0;\r\n\r\n\t\tfor (i; i >= n; i--) {\r\n\t\t\tif (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'\r\n\t\t\tif (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { // \"PK\\005\\006\"\r\n\t\t\t\tendOffset = i;\r\n\t\t\t\tcommentEnd = 
i;\r\n\t\t\t\tendStart = i + Utils.Constants.ENDHDR;\r\n\t\t\t\t// We already found a regular signature, let's look just a bit further to check if there's any zip64 signature\r\n\t\t\t\tn = i - Utils.Constants.END64HDR;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {\r\n\t\t\t\t// Found a zip64 signature, let's continue reading the whole zip64 record\r\n\t\t\t\tn = max;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (inBuffer.readUInt32LE(i) == Utils.Constants.ZIP64SIG) {\r\n\t\t\t\t// Found the zip64 record, let's determine it's size\r\n\t\t\t\tendOffset = i;\r\n\t\t\t\tendStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!~endOffset)\r\n\t\t\tthrow new Error(Utils.Errors.INVALID_FORMAT);\r\n\r\n\t\tmainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));\r\n\t\tif (mainHeader.commentLength) {\r\n\t\t\t_comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);\r\n\t\t}\r\n\t\t// readEntries();\r\n\t}\r\n\r\n\treturn {\r\n\t\t/**\r\n\t\t * Returns an array of ZipEntry objects existent in the current opened archive\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tget entries() {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\treturn entryList;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Archive comment\r\n\t\t * @return {String}\r\n\t\t */\r\n\t\tget comment() {\r\n\t\t\treturn _comment.toString();\r\n\t\t},\r\n\t\tset comment(val) {\r\n\t\t\t_comment = Utils.toBuffer(val);\r\n\t\t\tmainHeader.commentLength = _comment.length;\r\n\t\t},\r\n\r\n\t\tgetEntryCount: function() {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treturn mainHeader.diskEntries;\r\n\t\t\t}\r\n\r\n\t\t\treturn entryList.length;\r\n\t\t},\r\n\r\n\t\tforEach: function(callback) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\titerateEntries(callback);\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tentryList.forEach(callback);\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns a reference to the entry with the given name or null if entry is inexistent\r\n\t\t *\r\n\t\t * @param entryName\r\n\t\t * @return ZipEntry\r\n\t\t */\r\n\t\tgetEntry: function (/*String*/entryName) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\treturn entryTable[entryName] || null;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Adds the given entry to the entry list\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t */\r\n\t\tsetEntry: function (/*ZipEntry*/entry) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tentryList.push(entry);\r\n\t\t\tentryTable[entry.entryName] = entry;\r\n\t\t\tmainHeader.totalEntries = entryList.length;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Removes the entry with the given name from the entry list.\r\n\t\t *\r\n\t\t * If the entry is a directory, then all nested files and directories will be removed\r\n\t\t * @param entryName\r\n\t\t */\r\n\t\tdeleteEntry: function (/*String*/entryName) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tvar entry = entryTable[entryName];\r\n\t\t\tif (entry && entry.isDirectory) {\r\n\t\t\t\tvar _self = this;\r\n\t\t\t\tthis.getEntryChildren(entry).forEach(function (child) {\r\n\t\t\t\t\tif (child.entryName !== entryName) {\r\n\t\t\t\t\t\t_self.deleteEntry(child.entryName)\r\n\t\t\t\t\t}\r\n\t\t\t\t})\r\n\t\t\t}\r\n\t\t\tentryList.splice(entryList.indexOf(entry), 1);\r\n\t\t\tdelete(entryTable[entryName]);\r\n\t\t\tmainHeader.totalEntries = 
entryList.length;\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Iterates and returns all nested files and directories of the given entry\r\n\t\t *\r\n\t\t * @param entry\r\n\t\t * @return Array\r\n\t\t */\r\n\t\tgetEntryChildren: function (/*ZipEntry*/entry) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entry.isDirectory) {\r\n\t\t\t\tvar list = [],\r\n\t\t\t\t\tname = entry.entryName,\r\n\t\t\t\t\tlen = name.length;\r\n\r\n\t\t\t\tentryList.forEach(function (zipEntry) {\r\n\t\t\t\t\tif (zipEntry.entryName.substr(0, len) === name) {\r\n\t\t\t\t\t\tlist.push(zipEntry);\r\n\t\t\t\t\t}\r\n\t\t\t\t});\r\n\t\t\t\treturn list;\r\n\t\t\t}\r\n\t\t\treturn []\r\n\t\t},\r\n\r\n\t\t/**\r\n\t\t * Returns the zip file\r\n\t\t *\r\n\t\t * @return Buffer\r\n\t\t */\r\n\t\tcompressToBuffer: function () {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entryList.length > 1) {\r\n\t\t\t\tentryList.sort(function (a, b) {\r\n\t\t\t\t\tvar nameA = a.entryName.toLowerCase();\r\n\t\t\t\t\tvar nameB = b.entryName.toLowerCase();\r\n\t\t\t\t\tif (nameA < nameB) {\r\n\t\t\t\t\t\treturn -1\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (nameA > nameB) {\r\n\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t}\r\n\t\t\t\t\treturn 0;\r\n\t\t\t\t});\r\n\t\t\t}\r\n\r\n\t\t\tvar totalSize = 0,\r\n\t\t\t\tdataBlock = [],\r\n\t\t\t\tentryHeaders = [],\r\n\t\t\t\tdindex = 0;\r\n\r\n\t\t\tmainHeader.size = 0;\r\n\t\t\tmainHeader.offset = 0;\r\n\r\n\t\t\tentryList.forEach(function (entry) {\r\n\t\t\t\t// compress data and set local and entry header accordingly. Reason why is called first\r\n\t\t\t\tvar compressedData = entry.getCompressedData();\r\n\t\t\t\t// data header\r\n\t\t\t\tentry.header.offset = dindex;\r\n\t\t\t\tvar dataHeader = entry.header.dataHeaderToBinary();\r\n\t\t\t\tvar entryNameLen = entry.rawEntryName.length;\r\n\t\t\t\tvar extra = entry.extra.toString();\r\n\t\t\t\tvar postHeader = Buffer.alloc(entryNameLen + extra.length);\r\n\t\t\t\tentry.rawEntryName.copy(postHeader, 0);\r\n\t\t\t\tpostHeader.fill(extra, entryNameLen);\r\n\r\n\t\t\t\tvar dataLength = dataHeader.length + postHeader.length + compressedData.length;\r\n\r\n\t\t\t\tdindex += dataLength;\r\n\r\n\t\t\t\tdataBlock.push(dataHeader);\r\n\t\t\t\tdataBlock.push(postHeader);\r\n\t\t\t\tdataBlock.push(compressedData);\r\n\r\n\t\t\t\tvar entryHeader = entry.packHeader();\r\n\t\t\t\tentryHeaders.push(entryHeader);\r\n\t\t\t\tmainHeader.size += entryHeader.length;\r\n\t\t\t\ttotalSize += (dataLength + entryHeader.length);\r\n\t\t\t});\r\n\r\n\t\t\ttotalSize += mainHeader.mainHeaderSize; // also includes zip file comment length\r\n\t\t\t// point to end of data and beginning of central directory first record\r\n\t\t\tmainHeader.offset = dindex;\r\n\r\n\t\t\tdindex = 0;\r\n\t\t\tvar outBuffer = Buffer.alloc(totalSize);\r\n\t\t\tdataBlock.forEach(function (content) {\r\n\t\t\t\tcontent.copy(outBuffer, dindex); // write data blocks\r\n\t\t\t\tdindex += content.length;\r\n\t\t\t});\r\n\t\t\tentryHeaders.forEach(function (content) {\r\n\t\t\t\tcontent.copy(outBuffer, dindex); // write central directory entries\r\n\t\t\t\tdindex += content.length;\r\n\t\t\t});\r\n\r\n\t\t\tvar mh = mainHeader.toBinary();\r\n\t\t\tif (_comment) {\r\n\t\t\t\tBuffer.from(_comment).copy(mh, Utils.Constants.ENDHDR); // add zip file comment\r\n\t\t\t}\r\n\r\n\t\t\tmh.copy(outBuffer, dindex); // write main header\r\n\r\n\t\t\treturn outBuffer\r\n\t\t},\r\n\r\n\t\ttoAsyncBuffer: function (/*Function*/onSuccess, /*Function*/onFail, /*Function*/onItemStart, 
/*Function*/onItemEnd) {\r\n\t\t\tif (!loadedEntries) {\r\n\t\t\t\treadEntries();\r\n\t\t\t}\r\n\t\t\tif (entryList.length > 1) {\r\n\t\t\t\tentryList.sort(function (a, b) {\r\n\t\t\t\t\tvar nameA = a.entryName.toLowerCase();\r\n\t\t\t\t\tvar nameB = b.entryName.toLowerCase();\r\n\t\t\t\t\tif (nameA > nameB) {\r\n\t\t\t\t\t\treturn -1\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (nameA < nameB) {\r\n\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t}\r\n\t\t\t\t\treturn 0;\r\n\t\t\t\t});\r\n\t\t\t}\r\n\r\n\t\t\tvar totalSize = 0,\r\n\t\t\t\tdataBlock = [],\r\n\t\t\t\tentryHeaders = [],\r\n\t\t\t\tdindex = 0;\r\n\r\n\t\t\tmainHeader.size = 0;\r\n\t\t\tmainHeader.offset = 0;\r\n\r\n\t\t\tvar compress = function (entryList) {\r\n\t\t\t\tvar self = arguments.callee;\r\n\t\t\t\tif (entryList.length) {\r\n\t\t\t\t\tvar entry = entryList.pop();\r\n\t\t\t\t\tvar name = entry.entryName + entry.extra.toString();\r\n\t\t\t\t\tif (onItemStart) onItemStart(name);\r\n\t\t\t\t\tentry.getCompressedDataAsync(function (compressedData) {\r\n\t\t\t\t\t\tif (onItemEnd) onItemEnd(name);\r\n\r\n\t\t\t\t\t\tentry.header.offset = dindex;\r\n\t\t\t\t\t\t// data header\r\n\t\t\t\t\t\tvar dataHeader = entry.header.dataHeaderToBinary();\r\n\t\t\t\t\t\tvar postHeader;\r\n\t\t\t\t\t\ttry {\r\n\t\t\t\t\t\t\tpostHeader = Buffer.alloc(name.length, name); // using alloc will work on node 5.x+\r\n\t\t\t\t\t\t} catch(e){\r\n\t\t\t\t\t\t\tpostHeader = new Buffer(name); // use deprecated method if alloc fails...\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tvar dataLength = dataHeader.length + postHeader.length + compressedData.length;\r\n\r\n\t\t\t\t\t\tdindex += dataLength;\r\n\r\n\t\t\t\t\t\tdataBlock.push(dataHeader);\r\n\t\t\t\t\t\tdataBlock.push(postHeader);\r\n\t\t\t\t\t\tdataBlock.push(compressedData);\r\n\r\n\t\t\t\t\t\tvar entryHeader = entry.packHeader();\r\n\t\t\t\t\t\tentryHeaders.push(entryHeader);\r\n\t\t\t\t\t\tmainHeader.size += entryHeader.length;\r\n\t\t\t\t\t\ttotalSize += (dataLength + entryHeader.length);\r\n\r\n\t\t\t\t\t\tif (entryList.length) {\r\n\t\t\t\t\t\t\tself(entryList);\r\n\t\t\t\t\t\t} else {\r\n\r\n\r\n\t\t\t\t\t\t\ttotalSize += mainHeader.mainHeaderSize; // also includes zip file comment length\r\n\t\t\t\t\t\t\t// point to end of data and beginning of central directory first record\r\n\t\t\t\t\t\t\tmainHeader.offset = dindex;\r\n\r\n\t\t\t\t\t\t\tdindex = 0;\r\n\t\t\t\t\t\t\tvar outBuffer = Buffer.alloc(totalSize);\r\n\t\t\t\t\t\t\tdataBlock.forEach(function (content) {\r\n\t\t\t\t\t\t\t\tcontent.copy(outBuffer, dindex); // write data blocks\r\n\t\t\t\t\t\t\t\tdindex += content.length;\r\n\t\t\t\t\t\t\t});\r\n\t\t\t\t\t\t\tentryHeaders.forEach(function (content) {\r\n\t\t\t\t\t\t\t\tcontent.copy(outBuffer, dindex); // write central directory entries\r\n\t\t\t\t\t\t\t\tdindex += content.length;\r\n\t\t\t\t\t\t\t});\r\n\r\n\t\t\t\t\t\t\tvar mh = mainHeader.toBinary();\r\n\t\t\t\t\t\t\tif (_comment) {\r\n\t\t\t\t\t\t\t\t_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment\r\n\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\tmh.copy(outBuffer, dindex); // write main header\r\n\r\n\t\t\t\t\t\t\tonSuccess(outBuffer);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t});\r\n\t\t\t\t}\r\n\t\t\t};\r\n\r\n\t\t\tcompress(entryList);\r\n\t\t}\r\n\t}\r\n};\r\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var 
removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook\n\nfunction addHook (state, kind, name, hook) {\n var orig = hook\n if (!state.registry[name]) {\n state.registry[name] = []\n }\n\n if (kind === 'before') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options))\n }\n }\n\n if (kind === 'after') {\n hook = function (method, options) {\n var result\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_\n return orig(result, options)\n })\n .then(function () {\n return result\n })\n }\n }\n\n if (kind === 'error') {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options)\n })\n }\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig\n })\n}\n","module.exports = register\n\nfunction register (state, name, method, options) {\n if (typeof method !== 'function') {\n throw new Error('method for before hook must be a function')\n }\n\n if (!options) {\n options = {}\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options)\n }, method)()\n }\n\n return Promise.resolve()\n .then(function () {\n if (!state.registry[name]) {\n return method(options)\n }\n\n return (state.registry[name]).reduce(function (method, registered) {\n return registered.hook.bind(null, method, options)\n }, method)()\n })\n}\n","module.exports = removeHook\n\nfunction removeHook (state, name, method) {\n if (!state.registry[name]) {\n return\n }\n\n var index = state.registry[name]\n .map(function (registered) { return registered.orig })\n .indexOf(method)\n\n if (index === -1) {\n return\n }\n\n state.registry[name].splice(index, 1)\n}\n","'use strict';\nconst {\n\tV4MAPPED,\n\tADDRCONFIG,\n\tALL,\n\tpromises: {\n\t\tResolver: AsyncResolver\n\t},\n\tlookup: dnsLookup\n} = require('dns');\nconst {promisify} = require('util');\nconst os = require('os');\n\nconst kCacheableLookupCreateConnection = 
Symbol('cacheableLookupCreateConnection');\nconst kCacheableLookupInstance = Symbol('cacheableLookupInstance');\nconst kExpires = Symbol('expires');\n\nconst supportsALL = typeof ALL === 'number';\n\nconst verifyAgent = agent => {\n\tif (!(agent && typeof agent.createConnection === 'function')) {\n\t\tthrow new Error('Expected an Agent instance as the first argument');\n\t}\n};\n\nconst map4to6 = entries => {\n\tfor (const entry of entries) {\n\t\tif (entry.family === 6) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tentry.address = `::ffff:${entry.address}`;\n\t\tentry.family = 6;\n\t}\n};\n\nconst getIfaceInfo = () => {\n\tlet has4 = false;\n\tlet has6 = false;\n\n\tfor (const device of Object.values(os.networkInterfaces())) {\n\t\tfor (const iface of device) {\n\t\t\tif (iface.internal) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tif (iface.family === 'IPv6') {\n\t\t\t\thas6 = true;\n\t\t\t} else {\n\t\t\t\thas4 = true;\n\t\t\t}\n\n\t\t\tif (has4 && has6) {\n\t\t\t\treturn {has4, has6};\n\t\t\t}\n\t\t}\n\t}\n\n\treturn {has4, has6};\n};\n\nconst isIterable = map => {\n\treturn Symbol.iterator in map;\n};\n\nconst ttl = {ttl: true};\nconst all = {all: true};\n\nclass CacheableLookup {\n\tconstructor({\n\t\tcache = new Map(),\n\t\tmaxTtl = Infinity,\n\t\tfallbackDuration = 3600,\n\t\terrorTtl = 0.15,\n\t\tresolver = new AsyncResolver(),\n\t\tlookup = dnsLookup\n\t} = {}) {\n\t\tthis.maxTtl = maxTtl;\n\t\tthis.errorTtl = errorTtl;\n\n\t\tthis._cache = cache;\n\t\tthis._resolver = resolver;\n\t\tthis._dnsLookup = promisify(lookup);\n\n\t\tif (this._resolver instanceof AsyncResolver) {\n\t\t\tthis._resolve4 = this._resolver.resolve4.bind(this._resolver);\n\t\t\tthis._resolve6 = this._resolver.resolve6.bind(this._resolver);\n\t\t} else {\n\t\t\tthis._resolve4 = promisify(this._resolver.resolve4.bind(this._resolver));\n\t\t\tthis._resolve6 = promisify(this._resolver.resolve6.bind(this._resolver));\n\t\t}\n\n\t\tthis._iface = getIfaceInfo();\n\n\t\tthis._pending = {};\n\t\tthis._nextRemovalTime = false;\n\t\tthis._hostnamesToFallback = new Set();\n\n\t\tif (fallbackDuration < 1) {\n\t\t\tthis._fallback = false;\n\t\t} else {\n\t\t\tthis._fallback = true;\n\n\t\t\tconst interval = setInterval(() => {\n\t\t\t\tthis._hostnamesToFallback.clear();\n\t\t\t}, fallbackDuration * 1000);\n\n\t\t\t/* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */\n\t\t\tif (interval.unref) {\n\t\t\t\tinterval.unref();\n\t\t\t}\n\t\t}\n\n\t\tthis.lookup = this.lookup.bind(this);\n\t\tthis.lookupAsync = this.lookupAsync.bind(this);\n\t}\n\n\tset servers(servers) {\n\t\tthis.clear();\n\n\t\tthis._resolver.setServers(servers);\n\t}\n\n\tget servers() {\n\t\treturn this._resolver.getServers();\n\t}\n\n\tlookup(hostname, options, callback) {\n\t\tif (typeof options === 'function') {\n\t\t\tcallback = options;\n\t\t\toptions = {};\n\t\t} else if (typeof options === 'number') {\n\t\t\toptions = {\n\t\t\t\tfamily: options\n\t\t\t};\n\t\t}\n\n\t\tif (!callback) {\n\t\t\tthrow new Error('Callback must be a function.');\n\t\t}\n\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\tthis.lookupAsync(hostname, options).then(result => {\n\t\t\tif (options.all) {\n\t\t\t\tcallback(null, result);\n\t\t\t} else {\n\t\t\t\tcallback(null, result.address, result.family, result.expires, result.ttl);\n\t\t\t}\n\t\t}, callback);\n\t}\n\n\tasync lookupAsync(hostname, options = {}) {\n\t\tif (typeof options === 'number') {\n\t\t\toptions = {\n\t\t\t\tfamily: options\n\t\t\t};\n\t\t}\n\n\t\tlet cached = 
await this.query(hostname);\n\n\t\tif (options.family === 6) {\n\t\t\tconst filtered = cached.filter(entry => entry.family === 6);\n\n\t\t\tif (options.hints & V4MAPPED) {\n\t\t\t\tif ((supportsALL && options.hints & ALL) || filtered.length === 0) {\n\t\t\t\t\tmap4to6(cached);\n\t\t\t\t} else {\n\t\t\t\t\tcached = filtered;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tcached = filtered;\n\t\t\t}\n\t\t} else if (options.family === 4) {\n\t\t\tcached = cached.filter(entry => entry.family === 4);\n\t\t}\n\n\t\tif (options.hints & ADDRCONFIG) {\n\t\t\tconst {_iface} = this;\n\t\t\tcached = cached.filter(entry => entry.family === 6 ? _iface.has6 : _iface.has4);\n\t\t}\n\n\t\tif (cached.length === 0) {\n\t\t\tconst error = new Error(`cacheableLookup ENOTFOUND ${hostname}`);\n\t\t\terror.code = 'ENOTFOUND';\n\t\t\terror.hostname = hostname;\n\n\t\t\tthrow error;\n\t\t}\n\n\t\tif (options.all) {\n\t\t\treturn cached;\n\t\t}\n\n\t\treturn cached[0];\n\t}\n\n\tasync query(hostname) {\n\t\tlet cached = await this._cache.get(hostname);\n\n\t\tif (!cached) {\n\t\t\tconst pending = this._pending[hostname];\n\n\t\t\tif (pending) {\n\t\t\t\tcached = await pending;\n\t\t\t} else {\n\t\t\t\tconst newPromise = this.queryAndCache(hostname);\n\t\t\t\tthis._pending[hostname] = newPromise;\n\n\t\t\t\ttry {\n\t\t\t\t\tcached = await newPromise;\n\t\t\t\t} finally {\n\t\t\t\t\tdelete this._pending[hostname];\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tcached = cached.map(entry => {\n\t\t\treturn {...entry};\n\t\t});\n\n\t\treturn cached;\n\t}\n\n\tasync _resolve(hostname) {\n\t\tconst wrap = async promise => {\n\t\t\ttry {\n\t\t\t\treturn await promise;\n\t\t\t} catch (error) {\n\t\t\t\tif (error.code === 'ENODATA' || error.code === 'ENOTFOUND') {\n\t\t\t\t\treturn [];\n\t\t\t\t}\n\n\t\t\t\tthrow error;\n\t\t\t}\n\t\t};\n\n\t\t// ANY is unsafe as it doesn't trigger new queries in the underlying server.\n\t\tconst [A, AAAA] = await Promise.all([\n\t\t\tthis._resolve4(hostname, ttl),\n\t\t\tthis._resolve6(hostname, ttl)\n\t\t].map(promise => wrap(promise)));\n\n\t\tlet aTtl = 0;\n\t\tlet aaaaTtl = 0;\n\t\tlet cacheTtl = 0;\n\n\t\tconst now = Date.now();\n\n\t\tfor (const entry of A) {\n\t\t\tentry.family = 4;\n\t\t\tentry.expires = now + (entry.ttl * 1000);\n\n\t\t\taTtl = Math.max(aTtl, entry.ttl);\n\t\t}\n\n\t\tfor (const entry of AAAA) {\n\t\t\tentry.family = 6;\n\t\t\tentry.expires = now + (entry.ttl * 1000);\n\n\t\t\taaaaTtl = Math.max(aaaaTtl, entry.ttl);\n\t\t}\n\n\t\tif (A.length > 0) {\n\t\t\tif (AAAA.length > 0) {\n\t\t\t\tcacheTtl = Math.min(aTtl, aaaaTtl);\n\t\t\t} else {\n\t\t\t\tcacheTtl = aTtl;\n\t\t\t}\n\t\t} else {\n\t\t\tcacheTtl = aaaaTtl;\n\t\t}\n\n\t\treturn {\n\t\t\tentries: [\n\t\t\t\t...A,\n\t\t\t\t...AAAA\n\t\t\t],\n\t\t\tcacheTtl\n\t\t};\n\t}\n\n\tasync _lookup(hostname) {\n\t\ttry {\n\t\t\tconst entries = await this._dnsLookup(hostname, {\n\t\t\t\tall: true\n\t\t\t});\n\n\t\t\treturn {\n\t\t\t\tentries,\n\t\t\t\tcacheTtl: 0\n\t\t\t};\n\t\t} catch (_) {\n\t\t\treturn {\n\t\t\t\tentries: [],\n\t\t\t\tcacheTtl: 0\n\t\t\t};\n\t\t}\n\t}\n\n\tasync _set(hostname, data, cacheTtl) {\n\t\tif (this.maxTtl > 0 && cacheTtl > 0) {\n\t\t\tcacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000;\n\t\t\tdata[kExpires] = Date.now() + cacheTtl;\n\n\t\t\ttry {\n\t\t\t\tawait this._cache.set(hostname, data, cacheTtl);\n\t\t\t} catch (error) {\n\t\t\t\tthis.lookupAsync = async () => {\n\t\t\t\t\tconst cacheError = new Error('Cache Error. 
Please recreate the CacheableLookup instance.');\n\t\t\t\t\tcacheError.cause = error;\n\n\t\t\t\t\tthrow cacheError;\n\t\t\t\t};\n\t\t\t}\n\n\t\t\tif (isIterable(this._cache)) {\n\t\t\t\tthis._tick(cacheTtl);\n\t\t\t}\n\t\t}\n\t}\n\n\tasync queryAndCache(hostname) {\n\t\tif (this._hostnamesToFallback.has(hostname)) {\n\t\t\treturn this._dnsLookup(hostname, all);\n\t\t}\n\n\t\tlet query = await this._resolve(hostname);\n\n\t\tif (query.entries.length === 0 && this._fallback) {\n\t\t\tquery = await this._lookup(hostname);\n\n\t\t\tif (query.entries.length !== 0) {\n\t\t\t\t// Use `dns.lookup(...)` for that particular hostname\n\t\t\t\tthis._hostnamesToFallback.add(hostname);\n\t\t\t}\n\t\t}\n\n\t\tconst cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl;\n\t\tawait this._set(hostname, query.entries, cacheTtl);\n\n\t\treturn query.entries;\n\t}\n\n\t_tick(ms) {\n\t\tconst nextRemovalTime = this._nextRemovalTime;\n\n\t\tif (!nextRemovalTime || ms < nextRemovalTime) {\n\t\t\tclearTimeout(this._removalTimeout);\n\n\t\t\tthis._nextRemovalTime = ms;\n\n\t\t\tthis._removalTimeout = setTimeout(() => {\n\t\t\t\tthis._nextRemovalTime = false;\n\n\t\t\t\tlet nextExpiry = Infinity;\n\n\t\t\t\tconst now = Date.now();\n\n\t\t\t\tfor (const [hostname, entries] of this._cache) {\n\t\t\t\t\tconst expires = entries[kExpires];\n\n\t\t\t\t\tif (now >= expires) {\n\t\t\t\t\t\tthis._cache.delete(hostname);\n\t\t\t\t\t} else if (expires < nextExpiry) {\n\t\t\t\t\t\tnextExpiry = expires;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (nextExpiry !== Infinity) {\n\t\t\t\t\tthis._tick(nextExpiry - now);\n\t\t\t\t}\n\t\t\t}, ms);\n\n\t\t\t/* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */\n\t\t\tif (this._removalTimeout.unref) {\n\t\t\t\tthis._removalTimeout.unref();\n\t\t\t}\n\t\t}\n\t}\n\n\tinstall(agent) {\n\t\tverifyAgent(agent);\n\n\t\tif (kCacheableLookupCreateConnection in agent) {\n\t\t\tthrow new Error('CacheableLookup has been already installed');\n\t\t}\n\n\t\tagent[kCacheableLookupCreateConnection] = agent.createConnection;\n\t\tagent[kCacheableLookupInstance] = this;\n\n\t\tagent.createConnection = (options, callback) => {\n\t\t\tif (!('lookup' in options)) {\n\t\t\t\toptions.lookup = this.lookup;\n\t\t\t}\n\n\t\t\treturn agent[kCacheableLookupCreateConnection](options, callback);\n\t\t};\n\t}\n\n\tuninstall(agent) {\n\t\tverifyAgent(agent);\n\n\t\tif (agent[kCacheableLookupCreateConnection]) {\n\t\t\tif (agent[kCacheableLookupInstance] !== this) {\n\t\t\t\tthrow new Error('The agent is not owned by this CacheableLookup instance');\n\t\t\t}\n\n\t\t\tagent.createConnection = agent[kCacheableLookupCreateConnection];\n\n\t\t\tdelete agent[kCacheableLookupCreateConnection];\n\t\t\tdelete agent[kCacheableLookupInstance];\n\t\t}\n\t}\n\n\tupdateInterfaceInfo() {\n\t\tconst {_iface} = this;\n\n\t\tthis._iface = getIfaceInfo();\n\n\t\tif ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) {\n\t\t\tthis._cache.clear();\n\t\t}\n\t}\n\n\tclear(hostname) {\n\t\tif (hostname) {\n\t\t\tthis._cache.delete(hostname);\n\t\t\treturn;\n\t\t}\n\n\t\tthis._cache.clear();\n\t}\n}\n\nmodule.exports = CacheableLookup;\nmodule.exports.default = CacheableLookup;\n","'use strict';\nconst {PassThrough: PassThroughStream} = require('stream');\n\nmodule.exports = options => {\n\toptions = {...options};\n\n\tconst {array} = options;\n\tlet {encoding} = options;\n\tconst isBuffer = encoding === 'buffer';\n\tlet objectMode = false;\n\n\tif (array) 
{\n\t\tobjectMode = !(encoding || isBuffer);\n\t} else {\n\t\tencoding = encoding || 'utf8';\n\t}\n\n\tif (isBuffer) {\n\t\tencoding = null;\n\t}\n\n\tconst stream = new PassThroughStream({objectMode});\n\n\tif (encoding) {\n\t\tstream.setEncoding(encoding);\n\t}\n\n\tlet length = 0;\n\tconst chunks = [];\n\n\tstream.on('data', chunk => {\n\t\tchunks.push(chunk);\n\n\t\tif (objectMode) {\n\t\t\tlength = chunks.length;\n\t\t} else {\n\t\t\tlength += chunk.length;\n\t\t}\n\t});\n\n\tstream.getBufferedValue = () => {\n\t\tif (array) {\n\t\t\treturn chunks;\n\t\t}\n\n\t\treturn isBuffer ? Buffer.concat(chunks, length) : chunks.join('');\n\t};\n\n\tstream.getBufferedLength = () => length;\n\n\treturn stream;\n};\n","'use strict';\nconst {constants: BufferConstants} = require('buffer');\nconst pump = require('pump');\nconst bufferStream = require('./buffer-stream');\n\nclass MaxBufferError extends Error {\n\tconstructor() {\n\t\tsuper('maxBuffer exceeded');\n\t\tthis.name = 'MaxBufferError';\n\t}\n}\n\nasync function getStream(inputStream, options) {\n\tif (!inputStream) {\n\t\treturn Promise.reject(new Error('Expected a stream'));\n\t}\n\n\toptions = {\n\t\tmaxBuffer: Infinity,\n\t\t...options\n\t};\n\n\tconst {maxBuffer} = options;\n\n\tlet stream;\n\tawait new Promise((resolve, reject) => {\n\t\tconst rejectPromise = error => {\n\t\t\t// Don't retrieve an oversized buffer.\n\t\t\tif (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) {\n\t\t\t\terror.bufferedData = stream.getBufferedValue();\n\t\t\t}\n\n\t\t\treject(error);\n\t\t};\n\n\t\tstream = pump(inputStream, bufferStream(options), error => {\n\t\t\tif (error) {\n\t\t\t\trejectPromise(error);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tresolve();\n\t\t});\n\n\t\tstream.on('data', () => {\n\t\t\tif (stream.getBufferedLength() > maxBuffer) {\n\t\t\t\trejectPromise(new MaxBufferError());\n\t\t\t}\n\t\t});\n\t});\n\n\treturn stream.getBufferedValue();\n}\n\nmodule.exports = getStream;\n// TODO: Remove this for the next major release\nmodule.exports.default = getStream;\nmodule.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'});\nmodule.exports.array = (stream, options) => getStream(stream, {...options, array: true});\nmodule.exports.MaxBufferError = MaxBufferError;\n","'use strict';\n\nconst EventEmitter = require('events');\nconst urlLib = require('url');\nconst normalizeUrl = require('normalize-url');\nconst getStream = require('get-stream');\nconst CachePolicy = require('http-cache-semantics');\nconst Response = require('responselike');\nconst lowercaseKeys = require('lowercase-keys');\nconst cloneResponse = require('clone-response');\nconst Keyv = require('keyv');\n\nclass CacheableRequest {\n\tconstructor(request, cacheAdapter) {\n\t\tif (typeof request !== 'function') {\n\t\t\tthrow new TypeError('Parameter `request` must be a function');\n\t\t}\n\n\t\tthis.cache = new Keyv({\n\t\t\turi: typeof cacheAdapter === 'string' && cacheAdapter,\n\t\t\tstore: typeof cacheAdapter !== 'string' && cacheAdapter,\n\t\t\tnamespace: 'cacheable-request'\n\t\t});\n\n\t\treturn this.createCacheableRequest(request);\n\t}\n\n\tcreateCacheableRequest(request) {\n\t\treturn (opts, cb) => {\n\t\t\tlet url;\n\t\t\tif (typeof opts === 'string') {\n\t\t\t\turl = normalizeUrlObject(urlLib.parse(opts));\n\t\t\t\topts = {};\n\t\t\t} else if (opts instanceof urlLib.URL) {\n\t\t\t\turl = normalizeUrlObject(urlLib.parse(opts.toString()));\n\t\t\t\topts = {};\n\t\t\t} else {\n\t\t\t\tconst [pathname, ...searchParts] = 
(opts.path || '').split('?');\n\t\t\t\tconst search = searchParts.length > 0 ?\n\t\t\t\t\t`?${searchParts.join('?')}` :\n\t\t\t\t\t'';\n\t\t\t\turl = normalizeUrlObject({ ...opts, pathname, search });\n\t\t\t}\n\n\t\t\topts = {\n\t\t\t\theaders: {},\n\t\t\t\tmethod: 'GET',\n\t\t\t\tcache: true,\n\t\t\t\tstrictTtl: false,\n\t\t\t\tautomaticFailover: false,\n\t\t\t\t...opts,\n\t\t\t\t...urlObjectToRequestOptions(url)\n\t\t\t};\n\t\t\topts.headers = lowercaseKeys(opts.headers);\n\n\t\t\tconst ee = new EventEmitter();\n\t\t\tconst normalizedUrlString = normalizeUrl(\n\t\t\t\turlLib.format(url),\n\t\t\t\t{\n\t\t\t\t\tstripWWW: false,\n\t\t\t\t\tremoveTrailingSlash: false,\n\t\t\t\t\tstripAuthentication: false\n\t\t\t\t}\n\t\t\t);\n\t\t\tconst key = `${opts.method}:${normalizedUrlString}`;\n\t\t\tlet revalidate = false;\n\t\t\tlet madeRequest = false;\n\n\t\t\tconst makeRequest = opts => {\n\t\t\t\tmadeRequest = true;\n\t\t\t\tlet requestErrored = false;\n\t\t\t\tlet requestErrorCallback;\n\n\t\t\t\tconst requestErrorPromise = new Promise(resolve => {\n\t\t\t\t\trequestErrorCallback = () => {\n\t\t\t\t\t\tif (!requestErrored) {\n\t\t\t\t\t\t\trequestErrored = true;\n\t\t\t\t\t\t\tresolve();\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\t\t\t\t});\n\n\t\t\t\tconst handler = response => {\n\t\t\t\t\tif (revalidate && !opts.forceRefresh) {\n\t\t\t\t\t\tresponse.status = response.statusCode;\n\t\t\t\t\t\tconst revalidatedPolicy = CachePolicy.fromObject(revalidate.cachePolicy).revalidatedPolicy(opts, response);\n\t\t\t\t\t\tif (!revalidatedPolicy.modified) {\n\t\t\t\t\t\t\tconst headers = revalidatedPolicy.policy.responseHeaders();\n\t\t\t\t\t\t\tresponse = new Response(revalidate.statusCode, headers, revalidate.body, revalidate.url);\n\t\t\t\t\t\t\tresponse.cachePolicy = revalidatedPolicy.policy;\n\t\t\t\t\t\t\tresponse.fromCache = true;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (!response.fromCache) {\n\t\t\t\t\t\tresponse.cachePolicy = new CachePolicy(opts, response, opts);\n\t\t\t\t\t\tresponse.fromCache = false;\n\t\t\t\t\t}\n\n\t\t\t\t\tlet clonedResponse;\n\t\t\t\t\tif (opts.cache && response.cachePolicy.storable()) {\n\t\t\t\t\t\tclonedResponse = cloneResponse(response);\n\n\t\t\t\t\t\t(async () => {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tconst bodyPromise = getStream.buffer(response);\n\n\t\t\t\t\t\t\t\tawait Promise.race([\n\t\t\t\t\t\t\t\t\trequestErrorPromise,\n\t\t\t\t\t\t\t\t\tnew Promise(resolve => response.once('end', resolve))\n\t\t\t\t\t\t\t\t]);\n\n\t\t\t\t\t\t\t\tif (requestErrored) {\n\t\t\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tconst body = await bodyPromise;\n\n\t\t\t\t\t\t\t\tconst value = {\n\t\t\t\t\t\t\t\t\tcachePolicy: response.cachePolicy.toObject(),\n\t\t\t\t\t\t\t\t\turl: response.url,\n\t\t\t\t\t\t\t\t\tstatusCode: response.fromCache ? revalidate.statusCode : response.statusCode,\n\t\t\t\t\t\t\t\t\tbody\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t\tlet ttl = opts.strictTtl ? response.cachePolicy.timeToLive() : undefined;\n\t\t\t\t\t\t\t\tif (opts.maxTtl) {\n\t\t\t\t\t\t\t\t\tttl = ttl ? 
Math.min(ttl, opts.maxTtl) : opts.maxTtl;\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tawait this.cache.set(key, value, ttl);\n\t\t\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})();\n\t\t\t\t\t} else if (opts.cache && revalidate) {\n\t\t\t\t\t\t(async () => {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tawait this.cache.delete(key);\n\t\t\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})();\n\t\t\t\t\t}\n\n\t\t\t\t\tee.emit('response', clonedResponse || response);\n\t\t\t\t\tif (typeof cb === 'function') {\n\t\t\t\t\t\tcb(clonedResponse || response);\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\ttry {\n\t\t\t\t\tconst req = request(opts, handler);\n\t\t\t\t\treq.once('error', requestErrorCallback);\n\t\t\t\t\treq.once('abort', requestErrorCallback);\n\t\t\t\t\tee.emit('request', req);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tee.emit('error', new CacheableRequest.RequestError(error));\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t(async () => {\n\t\t\t\tconst get = async opts => {\n\t\t\t\t\tawait Promise.resolve();\n\n\t\t\t\t\tconst cacheEntry = opts.cache ? await this.cache.get(key) : undefined;\n\t\t\t\t\tif (typeof cacheEntry === 'undefined') {\n\t\t\t\t\t\treturn makeRequest(opts);\n\t\t\t\t\t}\n\n\t\t\t\t\tconst policy = CachePolicy.fromObject(cacheEntry.cachePolicy);\n\t\t\t\t\tif (policy.satisfiesWithoutRevalidation(opts) && !opts.forceRefresh) {\n\t\t\t\t\t\tconst headers = policy.responseHeaders();\n\t\t\t\t\t\tconst response = new Response(cacheEntry.statusCode, headers, cacheEntry.body, cacheEntry.url);\n\t\t\t\t\t\tresponse.cachePolicy = policy;\n\t\t\t\t\t\tresponse.fromCache = true;\n\n\t\t\t\t\t\tee.emit('response', response);\n\t\t\t\t\t\tif (typeof cb === 'function') {\n\t\t\t\t\t\t\tcb(response);\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\trevalidate = cacheEntry;\n\t\t\t\t\t\topts.headers = policy.revalidationHeaders(opts);\n\t\t\t\t\t\tmakeRequest(opts);\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\tconst errorHandler = error => ee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\tthis.cache.once('error', errorHandler);\n\t\t\t\tee.on('response', () => this.cache.removeListener('error', errorHandler));\n\n\t\t\t\ttry {\n\t\t\t\t\tawait get(opts);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (opts.automaticFailover && !madeRequest) {\n\t\t\t\t\t\tmakeRequest(opts);\n\t\t\t\t\t}\n\n\t\t\t\t\tee.emit('error', new CacheableRequest.CacheError(error));\n\t\t\t\t}\n\t\t\t})();\n\n\t\t\treturn ee;\n\t\t};\n\t}\n}\n\nfunction urlObjectToRequestOptions(url) {\n\tconst options = { ...url };\n\toptions.path = `${url.pathname || '/'}${url.search || ''}`;\n\tdelete options.pathname;\n\tdelete options.search;\n\treturn options;\n}\n\nfunction normalizeUrlObject(url) {\n\t// If url was parsed by url.parse or new URL:\n\t// - hostname will be set\n\t// - host will be hostname[:port]\n\t// - port will be set if it was explicit in the parsed string\n\t// Otherwise, url was from request options:\n\t// - hostname or host may be set\n\t// - host shall not have port encoded\n\treturn {\n\t\tprotocol: url.protocol,\n\t\tauth: url.auth,\n\t\thostname: url.hostname || url.host || 'localhost',\n\t\tport: url.port,\n\t\tpathname: url.pathname,\n\t\tsearch: url.search\n\t};\n}\n\nCacheableRequest.RequestError = class extends Error {\n\tconstructor(error) {\n\t\tsuper(error.message);\n\t\tthis.name = 'RequestError';\n\t\tObject.assign(this, 
error);\n\t}\n};\n\nCacheableRequest.CacheError = class extends Error {\n\tconstructor(error) {\n\t\tsuper(error.message);\n\t\tthis.name = 'CacheError';\n\t\tObject.assign(this, error);\n\t}\n};\n\nmodule.exports = CacheableRequest;\n","'use strict';\n\nconst PassThrough = require('stream').PassThrough;\nconst mimicResponse = require('mimic-response');\n\nconst cloneResponse = response => {\n\tif (!(response && response.pipe)) {\n\t\tthrow new TypeError('Parameter `response` must be a response stream.');\n\t}\n\n\tconst clone = new PassThrough();\n\tmimicResponse(response, clone);\n\n\treturn response.pipe(clone);\n};\n\nmodule.exports = cloneResponse;\n","'use strict';\nconst {Transform, PassThrough} = require('stream');\nconst zlib = require('zlib');\nconst mimicResponse = require('mimic-response');\n\nmodule.exports = response => {\n\tconst contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();\n\n\tif (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {\n\t\treturn response;\n\t}\n\n\t// TODO: Remove this when targeting Node.js 12.\n\tconst isBrotli = contentEncoding === 'br';\n\tif (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {\n\t\tresponse.destroy(new Error('Brotli is not supported on Node.js < 12'));\n\t\treturn response;\n\t}\n\n\tlet isEmpty = true;\n\n\tconst checker = new Transform({\n\t\ttransform(data, _encoding, callback) {\n\t\t\tisEmpty = false;\n\n\t\t\tcallback(null, data);\n\t\t},\n\n\t\tflush(callback) {\n\t\t\tcallback();\n\t\t}\n\t});\n\n\tconst finalStream = new PassThrough({\n\t\tautoDestroy: false,\n\t\tdestroy(error, callback) {\n\t\t\tresponse.destroy();\n\n\t\t\tcallback(error);\n\t\t}\n\t});\n\n\tconst decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();\n\n\tdecompressStream.once('error', error => {\n\t\tif (isEmpty && !response.readable) {\n\t\t\tfinalStream.end();\n\t\t\treturn;\n\t\t}\n\n\t\tfinalStream.destroy(error);\n\t});\n\n\tmimicResponse(response, finalStream);\n\tresponse.pipe(checker).pipe(decompressStream).pipe(finalStream);\n\n\treturn finalStream;\n};\n","'use strict';\n\n// We define these manually to ensure they're always copied\n// even if they would move up the prototype chain\n// https://nodejs.org/api/http.html#http_class_http_incomingmessage\nconst knownProperties = [\n\t'aborted',\n\t'complete',\n\t'headers',\n\t'httpVersion',\n\t'httpVersionMinor',\n\t'httpVersionMajor',\n\t'method',\n\t'rawHeaders',\n\t'rawTrailers',\n\t'setTimeout',\n\t'socket',\n\t'statusCode',\n\t'statusMessage',\n\t'trailers',\n\t'url'\n];\n\nmodule.exports = (fromStream, toStream) => {\n\tif (toStream._readableState.autoDestroy) {\n\t\tthrow new Error('The second stream must have the `autoDestroy` option set to `false`');\n\t}\n\n\tconst fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));\n\n\tconst properties = {};\n\n\tfor (const property of fromProperties) {\n\t\t// Don't overwrite existing properties.\n\t\tif (property in toStream) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tproperties[property] = {\n\t\t\tget() {\n\t\t\t\tconst value = fromStream[property];\n\t\t\t\tconst isFunction = typeof value === 'function';\n\n\t\t\t\treturn isFunction ? 
value.bind(fromStream) : value;\n\t\t\t},\n\t\t\tset(value) {\n\t\t\t\tfromStream[property] = value;\n\t\t\t},\n\t\t\tenumerable: true,\n\t\t\tconfigurable: false\n\t\t};\n\t}\n\n\tObject.defineProperties(toStream, properties);\n\n\tfromStream.once('aborted', () => {\n\t\ttoStream.destroy();\n\n\t\ttoStream.emit('aborted');\n\t});\n\n\tfromStream.once('close', () => {\n\t\tif (fromStream.complete) {\n\t\t\tif (toStream.readable) {\n\t\t\t\ttoStream.once('end', () => {\n\t\t\t\t\ttoStream.emit('close');\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\ttoStream.emit('close');\n\t\t\t}\n\t\t} else {\n\t\t\ttoStream.emit('close');\n\t\t}\n\t});\n\n\treturn toStream;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction isTLSSocket(socket) {\n return socket.encrypted;\n}\nconst deferToConnect = (socket, fn) => {\n let listeners;\n if (typeof fn === 'function') {\n const connect = fn;\n listeners = { connect };\n }\n else {\n listeners = fn;\n }\n const hasConnectListener = typeof listeners.connect === 'function';\n const hasSecureConnectListener = typeof listeners.secureConnect === 'function';\n const hasCloseListener = typeof listeners.close === 'function';\n const onConnect = () => {\n if (hasConnectListener) {\n listeners.connect();\n }\n if (isTLSSocket(socket) && hasSecureConnectListener) {\n if (socket.authorized) {\n listeners.secureConnect();\n }\n else if (!socket.authorizationError) {\n socket.once('secureConnect', listeners.secureConnect);\n }\n }\n if (hasCloseListener) {\n socket.once('close', listeners.close);\n }\n };\n if (socket.writable && !socket.connecting) {\n onConnect();\n }\n else if (socket.connecting) {\n socket.once('connect', onConnect);\n }\n else if (socket.destroyed && hasCloseListener) {\n listeners.close(socket._hadError);\n }\n};\nexports.default = deferToConnect;\n// For CommonJS default export support\nmodule.exports = deferToConnect;\nmodule.exports.default = deferToConnect;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","'use strict';\n\nconst stringify = require('./lib/stringify');\nconst compile = require('./lib/compile');\nconst expand = require('./lib/expand');\nconst parse = require('./lib/parse');\n\n/**\n * Expand the given pattern or create a regex-compatible string.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']\n * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {String}\n * @api public\n */\n\nconst braces = (input, options = {}) => {\n let output = [];\n\n if (Array.isArray(input)) {\n for (let pattern of input) {\n let result = braces.create(pattern, options);\n if (Array.isArray(result)) {\n output.push(...result);\n } else {\n output.push(result);\n }\n }\n } else {\n output = [].concat(braces.create(input, options));\n }\n\n if (options && options.expand === true && options.nodupes === true) {\n output = [...new Set(output)];\n }\n return output;\n};\n\n/**\n * Parse the given `str` with the given `options`.\n *\n * ```js\n * // braces.parse(pattern, [, options]);\n * const ast = braces.parse('a/{b,c}/d');\n * console.log(ast);\n * ```\n * @param {String} pattern Brace pattern to parse\n * @param {Object} options\n * @return {Object} Returns an AST\n * @api public\n */\n\nbraces.parse = (input, options = {}) => parse(input, options);\n\n/**\n * Creates a braces string from an AST, or an AST node.\n *\n * ```js\n * const braces = require('braces');\n * let ast = braces.parse('foo/{a,b}/bar');\n * console.log(stringify(ast.nodes[2])); //=> '{a,b}'\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api 
public\n */\n\nbraces.stringify = (input, options = {}) => {\n if (typeof input === 'string') {\n return stringify(braces.parse(input, options), options);\n }\n return stringify(input, options);\n};\n\n/**\n * Compiles a brace pattern into a regex-compatible, optimized string.\n * This method is called by the main [braces](#braces) function by default.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.compile('a/{b,c}/d'));\n * //=> ['a/(b|c)/d']\n * ```\n * @param {String} `input` Brace pattern or AST.\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.compile = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n return compile(input, options);\n};\n\n/**\n * Expands a brace pattern into an array. This method is called by the\n * main [braces](#braces) function when `options.expand` is true. Before\n * using this method it's recommended that you read the [performance notes](#performance))\n * and advantages of using [.compile](#compile) instead.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.expand('a/{b,c}/d'));\n * //=> ['a/b/d', 'a/c/d'];\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.expand = (input, options = {}) => {\n if (typeof input === 'string') {\n input = braces.parse(input, options);\n }\n\n let result = expand(input, options);\n\n // filter out empty strings if specified\n if (options.noempty === true) {\n result = result.filter(Boolean);\n }\n\n // filter out duplicates if specified\n if (options.nodupes === true) {\n result = [...new Set(result)];\n }\n\n return result;\n};\n\n/**\n * Processes a brace pattern and returns either an expanded array\n * (if `options.expand` is true), a highly optimized regex-compatible string.\n * This method is called by the main [braces](#braces) function.\n *\n * ```js\n * const braces = require('braces');\n * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))\n * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'\n * ```\n * @param {String} `pattern` Brace pattern\n * @param {Object} `options`\n * @return {Array} Returns an array of expanded values.\n * @api public\n */\n\nbraces.create = (input, options = {}) => {\n if (input === '' || input.length < 3) {\n return [input];\n }\n\n return options.expand !== true\n ? braces.compile(input, options)\n : braces.expand(input, options);\n};\n\n/**\n * Expose \"braces\"\n */\n\nmodule.exports = braces;\n","'use strict';\n\nconst fill = require('fill-range');\nconst utils = require('./utils');\n\nconst compile = (ast, options = {}) => {\n let walk = (node, parent = {}) => {\n let invalidBlock = utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let invalid = invalidBlock === true || invalidNode === true;\n let prefix = options.escapeInvalid === true ? '\\\\' : '';\n let output = '';\n\n if (node.isOpen === true) {\n return prefix + node.value;\n }\n if (node.isClose === true) {\n return prefix + node.value;\n }\n\n if (node.type === 'open') {\n return invalid ? (prefix + node.value) : '(';\n }\n\n if (node.type === 'close') {\n return invalid ? (prefix + node.value) : ')';\n }\n\n if (node.type === 'comma') {\n return node.prev.type === 'comma' ? '' : (invalid ? 
node.value : '|');\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n let range = fill(...args, { ...options, wrap: false, toRegex: true });\n\n if (range.length !== 0) {\n return args.length > 1 && range.length > 1 ? `(${range})` : range;\n }\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += walk(child, node);\n }\n }\n return output;\n };\n\n return walk(ast);\n};\n\nmodule.exports = compile;\n","'use strict';\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n\n // Digits\n CHAR_0: '0', /* 0 */\n CHAR_9: '9', /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 'A', /* A */\n CHAR_LOWERCASE_A: 'a', /* a */\n CHAR_UPPERCASE_Z: 'Z', /* Z */\n CHAR_LOWERCASE_Z: 'z', /* z */\n\n CHAR_LEFT_PARENTHESES: '(', /* ( */\n CHAR_RIGHT_PARENTHESES: ')', /* ) */\n\n CHAR_ASTERISK: '*', /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: '&', /* & */\n CHAR_AT: '@', /* @ */\n CHAR_BACKSLASH: '\\\\', /* \\ */\n CHAR_BACKTICK: '`', /* ` */\n CHAR_CARRIAGE_RETURN: '\\r', /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */\n CHAR_COLON: ':', /* : */\n CHAR_COMMA: ',', /* , */\n CHAR_DOLLAR: '$', /* . */\n CHAR_DOT: '.', /* . */\n CHAR_DOUBLE_QUOTE: '\"', /* \" */\n CHAR_EQUAL: '=', /* = */\n CHAR_EXCLAMATION_MARK: '!', /* ! */\n CHAR_FORM_FEED: '\\f', /* \\f */\n CHAR_FORWARD_SLASH: '/', /* / */\n CHAR_HASH: '#', /* # */\n CHAR_HYPHEN_MINUS: '-', /* - */\n CHAR_LEFT_ANGLE_BRACKET: '<', /* < */\n CHAR_LEFT_CURLY_BRACE: '{', /* { */\n CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */\n CHAR_LINE_FEED: '\\n', /* \\n */\n CHAR_NO_BREAK_SPACE: '\\u00A0', /* \\u00A0 */\n CHAR_PERCENT: '%', /* % */\n CHAR_PLUS: '+', /* + */\n CHAR_QUESTION_MARK: '?', /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */\n CHAR_RIGHT_CURLY_BRACE: '}', /* } */\n CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */\n CHAR_SEMICOLON: ';', /* ; */\n CHAR_SINGLE_QUOTE: '\\'', /* ' */\n CHAR_SPACE: ' ', /* */\n CHAR_TAB: '\\t', /* \\t */\n CHAR_UNDERSCORE: '_', /* _ */\n CHAR_VERTICAL_LINE: '|', /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\\uFEFF' /* \\uFEFF */\n};\n","'use strict';\n\nconst fill = require('fill-range');\nconst stringify = require('./stringify');\nconst utils = require('./utils');\n\nconst append = (queue = '', stash = '', enclose = false) => {\n let result = [];\n\n queue = [].concat(queue);\n stash = [].concat(stash);\n\n if (!stash.length) return queue;\n if (!queue.length) {\n return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;\n }\n\n for (let item of queue) {\n if (Array.isArray(item)) {\n for (let value of item) {\n result.push(append(value, stash, enclose));\n }\n } else {\n for (let ele of stash) {\n if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;\n result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));\n }\n }\n }\n return utils.flatten(result);\n};\n\nconst expand = (ast, options = {}) => {\n let rangeLimit = options.rangeLimit === void 0 ? 
1000 : options.rangeLimit;\n\n let walk = (node, parent = {}) => {\n node.queue = [];\n\n let p = parent;\n let q = parent.queue;\n\n while (p.type !== 'brace' && p.type !== 'root' && p.parent) {\n p = p.parent;\n q = p.queue;\n }\n\n if (node.invalid || node.dollar) {\n q.push(append(q.pop(), stringify(node, options)));\n return;\n }\n\n if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {\n q.push(append(q.pop(), ['{}']));\n return;\n }\n\n if (node.nodes && node.ranges > 0) {\n let args = utils.reduce(node.nodes);\n\n if (utils.exceedsLimit(...args, options.step, rangeLimit)) {\n throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');\n }\n\n let range = fill(...args, options);\n if (range.length === 0) {\n range = stringify(node, options);\n }\n\n q.push(append(q.pop(), range));\n node.nodes = [];\n return;\n }\n\n let enclose = utils.encloseBrace(node);\n let queue = node.queue;\n let block = node;\n\n while (block.type !== 'brace' && block.type !== 'root' && block.parent) {\n block = block.parent;\n queue = block.queue;\n }\n\n for (let i = 0; i < node.nodes.length; i++) {\n let child = node.nodes[i];\n\n if (child.type === 'comma' && node.type === 'brace') {\n if (i === 1) queue.push('');\n queue.push('');\n continue;\n }\n\n if (child.type === 'close') {\n q.push(append(q.pop(), queue, enclose));\n continue;\n }\n\n if (child.value && child.type !== 'open') {\n queue.push(append(queue.pop(), child.value));\n continue;\n }\n\n if (child.nodes) {\n walk(child, node);\n }\n }\n\n return queue;\n };\n\n return utils.flatten(walk(ast));\n};\n\nmodule.exports = expand;\n","'use strict';\n\nconst stringify = require('./stringify');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n CHAR_BACKSLASH, /* \\ */\n CHAR_BACKTICK, /* ` */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_RIGHT_SQUARE_BRACKET, /* ] */\n CHAR_DOUBLE_QUOTE, /* \" */\n CHAR_SINGLE_QUOTE, /* ' */\n CHAR_NO_BREAK_SPACE,\n CHAR_ZERO_WIDTH_NOBREAK_SPACE\n} = require('./constants');\n\n/**\n * parse\n */\n\nconst parse = (input, options = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n let opts = options || {};\n let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n if (input.length > max) {\n throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);\n }\n\n let ast = { type: 'root', input, nodes: [] };\n let stack = [ast];\n let block = ast;\n let prev = ast;\n let brackets = 0;\n let length = input.length;\n let index = 0;\n let depth = 0;\n let value;\n let memo = {};\n\n /**\n * Helpers\n */\n\n const advance = () => input[index++];\n const push = node => {\n if (node.type === 'text' && prev.type === 'dot') {\n prev.type = 'text';\n }\n\n if (prev && prev.type === 'text' && node.type === 'text') {\n prev.value += node.value;\n return;\n }\n\n block.nodes.push(node);\n node.parent = block;\n node.prev = prev;\n prev = node;\n return node;\n };\n\n push({ type: 'bos' });\n\n while (index < length) {\n block = stack[stack.length - 1];\n value = advance();\n\n /**\n * Invalid chars\n */\n\n if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {\n continue;\n }\n\n /**\n * Escaped chars\n */\n\n if (value === CHAR_BACKSLASH) {\n push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });\n continue;\n }\n\n /**\n * Right square bracket (literal): ']'\n */\n\n if (value === CHAR_RIGHT_SQUARE_BRACKET) {\n push({ type: 'text', value: '\\\\' + value });\n continue;\n }\n\n /**\n * Left square bracket: '['\n */\n\n if (value === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n\n let closed = true;\n let next;\n\n while (index < length && (next = advance())) {\n value += next;\n\n if (next === CHAR_LEFT_SQUARE_BRACKET) {\n brackets++;\n continue;\n }\n\n if (next === CHAR_BACKSLASH) {\n value += advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n brackets--;\n\n if (brackets === 0) {\n break;\n }\n }\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === CHAR_LEFT_PARENTHESES) {\n block = push({ type: 'paren', nodes: [] });\n stack.push(block);\n push({ type: 'text', value });\n continue;\n }\n\n if (value === CHAR_RIGHT_PARENTHESES) {\n if (block.type !== 'paren') {\n push({ type: 'text', value });\n continue;\n }\n block = stack.pop();\n push({ type: 'text', value });\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Quotes: '|\"|`\n */\n\n if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {\n let open = value;\n let next;\n\n if (options.keepQuotes !== true) {\n value = '';\n }\n\n while (index < length && (next = advance())) {\n if (next === CHAR_BACKSLASH) {\n value += next + advance();\n continue;\n }\n\n if (next === open) {\n if (options.keepQuotes === true) value += next;\n break;\n }\n\n value += next;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Left curly brace: '{'\n */\n\n if (value === CHAR_LEFT_CURLY_BRACE) {\n depth++;\n\n let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;\n let brace = {\n type: 'brace',\n open: true,\n close: false,\n dollar,\n depth,\n commas: 0,\n ranges: 0,\n nodes: []\n };\n\n block = push(brace);\n stack.push(block);\n push({ type: 'open', value });\n continue;\n }\n\n /**\n * Right curly brace: '}'\n */\n\n if (value === CHAR_RIGHT_CURLY_BRACE) {\n if (block.type !== 'brace') {\n push({ type: 'text', value });\n continue;\n }\n\n let type = 'close';\n block = stack.pop();\n block.close = true;\n\n push({ type, value });\n depth--;\n\n block = stack[stack.length - 1];\n continue;\n }\n\n /**\n * Comma: ','\n */\n\n if 
(value === CHAR_COMMA && depth > 0) {\n if (block.ranges > 0) {\n block.ranges = 0;\n let open = block.nodes.shift();\n block.nodes = [open, { type: 'text', value: stringify(block) }];\n }\n\n push({ type: 'comma', value });\n block.commas++;\n continue;\n }\n\n /**\n * Dot: '.'\n */\n\n if (value === CHAR_DOT && depth > 0 && block.commas === 0) {\n let siblings = block.nodes;\n\n if (depth === 0 || siblings.length === 0) {\n push({ type: 'text', value });\n continue;\n }\n\n if (prev.type === 'dot') {\n block.range = [];\n prev.value += value;\n prev.type = 'range';\n\n if (block.nodes.length !== 3 && block.nodes.length !== 5) {\n block.invalid = true;\n block.ranges = 0;\n prev.type = 'text';\n continue;\n }\n\n block.ranges++;\n block.args = [];\n continue;\n }\n\n if (prev.type === 'range') {\n siblings.pop();\n\n let before = siblings[siblings.length - 1];\n before.value += prev.value + value;\n prev = before;\n block.ranges--;\n continue;\n }\n\n push({ type: 'dot', value });\n continue;\n }\n\n /**\n * Text\n */\n\n push({ type: 'text', value });\n }\n\n // Mark imbalanced braces and brackets as invalid\n do {\n block = stack.pop();\n\n if (block.type !== 'root') {\n block.nodes.forEach(node => {\n if (!node.nodes) {\n if (node.type === 'open') node.isOpen = true;\n if (node.type === 'close') node.isClose = true;\n if (!node.nodes) node.type = 'text';\n node.invalid = true;\n }\n });\n\n // get the location of the block on parent.nodes (block's siblings)\n let parent = stack[stack.length - 1];\n let index = parent.nodes.indexOf(block);\n // replace the (invalid) block with it's nodes\n parent.nodes.splice(index, 1, ...block.nodes);\n }\n } while (stack.length > 0);\n\n push({ type: 'eos' });\n return ast;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst utils = require('./utils');\n\nmodule.exports = (ast, options = {}) => {\n let stringify = (node, parent = {}) => {\n let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);\n let invalidNode = node.invalid === true && options.escapeInvalid === true;\n let output = '';\n\n if (node.value) {\n if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {\n return '\\\\' + node.value;\n }\n return node.value;\n }\n\n if (node.value) {\n return node.value;\n }\n\n if (node.nodes) {\n for (let child of node.nodes) {\n output += stringify(child);\n }\n }\n return output;\n };\n\n return stringify(ast);\n};\n\n","'use strict';\n\nexports.isInteger = num => {\n if (typeof num === 'number') {\n return Number.isInteger(num);\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isInteger(Number(num));\n }\n return false;\n};\n\n/**\n * Find a node of the given type\n */\n\nexports.find = (node, type) => node.nodes.find(node => node.type === type);\n\n/**\n * Find a node of the given type\n */\n\nexports.exceedsLimit = (min, max, step = 1, limit) => {\n if (limit === false) return false;\n if (!exports.isInteger(min) || !exports.isInteger(max)) return false;\n return ((Number(max) - Number(min)) / Number(step)) >= limit;\n};\n\n/**\n * Escape the given node with '\\\\' before node.value\n */\n\nexports.escapeNode = (block, n = 0, type) => {\n let node = block.nodes[n];\n if (!node) return;\n\n if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {\n if (node.escaped !== true) {\n node.value = '\\\\' + node.value;\n node.escaped = true;\n }\n }\n};\n\n/**\n * Returns true if the given brace node should be enclosed in literal braces\n */\n\nexports.encloseBrace = 
node => {\n if (node.type !== 'brace') return false;\n if ((node.commas >> 0 + node.ranges >> 0) === 0) {\n node.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a brace node is invalid.\n */\n\nexports.isInvalidBrace = block => {\n if (block.type !== 'brace') return false;\n if (block.invalid === true || block.dollar) return true;\n if ((block.commas >> 0 + block.ranges >> 0) === 0) {\n block.invalid = true;\n return true;\n }\n if (block.open !== true || block.close !== true) {\n block.invalid = true;\n return true;\n }\n return false;\n};\n\n/**\n * Returns true if a node is an open or close node\n */\n\nexports.isOpenOrClose = node => {\n if (node.type === 'open' || node.type === 'close') {\n return true;\n }\n return node.open === true || node.close === true;\n};\n\n/**\n * Reduce an array of text nodes.\n */\n\nexports.reduce = nodes => nodes.reduce((acc, node) => {\n if (node.type === 'text') acc.push(node.value);\n if (node.type === 'range') node.type = 'text';\n return acc;\n}, []);\n\n/**\n * Flatten an array\n */\n\nexports.flatten = (...args) => {\n const result = [];\n const flat = arr => {\n for (let i = 0; i < arr.length; i++) {\n let ele = arr[i];\n Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);\n }\n return result;\n };\n flat(args);\n return result;\n};\n","/*!\n * fill-range \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\n'use strict';\n\nconst util = require('util');\nconst toRegexRange = require('to-regex-range');\n\nconst isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\n\nconst transform = toNumber => {\n return value => toNumber === true ? Number(value) : String(value);\n};\n\nconst isValidValue = value => {\n return typeof value === 'number' || (typeof value === 'string' && value !== '');\n};\n\nconst isNumber = num => Number.isInteger(+num);\n\nconst zeros = input => {\n let value = `${input}`;\n let index = -1;\n if (value[0] === '-') value = value.slice(1);\n if (value === '0') return false;\n while (value[++index] === '0');\n return index > 0;\n};\n\nconst stringify = (start, end, options) => {\n if (typeof start === 'string' || typeof end === 'string') {\n return true;\n }\n return options.stringify === true;\n};\n\nconst pad = (input, maxLength, toNumber) => {\n if (maxLength > 0) {\n let dash = input[0] === '-' ? '-' : '';\n if (dash) input = input.slice(1);\n input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));\n }\n if (toNumber === false) {\n return String(input);\n }\n return input;\n};\n\nconst toMaxLen = (input, maxLength) => {\n let negative = input[0] === '-' ? '-' : '';\n if (negative) {\n input = input.slice(1);\n maxLength--;\n }\n while (input.length < maxLength) input = '0' + input;\n return negative ? ('-' + input) : input;\n};\n\nconst toSequence = (parts, options) => {\n parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);\n\n let prefix = options.capture ? 
'' : '?:';\n let positives = '';\n let negatives = '';\n let result;\n\n if (parts.positives.length) {\n positives = parts.positives.join('|');\n }\n\n if (parts.negatives.length) {\n negatives = `-(${prefix}${parts.negatives.join('|')})`;\n }\n\n if (positives && negatives) {\n result = `${positives}|${negatives}`;\n } else {\n result = positives || negatives;\n }\n\n if (options.wrap) {\n return `(${prefix}${result})`;\n }\n\n return result;\n};\n\nconst toRange = (a, b, isNumbers, options) => {\n if (isNumbers) {\n return toRegexRange(a, b, { wrap: false, ...options });\n }\n\n let start = String.fromCharCode(a);\n if (a === b) return start;\n\n let stop = String.fromCharCode(b);\n return `[${start}-${stop}]`;\n};\n\nconst toRegex = (start, end, options) => {\n if (Array.isArray(start)) {\n let wrap = options.wrap === true;\n let prefix = options.capture ? '' : '?:';\n return wrap ? `(${prefix}${start.join('|')})` : start.join('|');\n }\n return toRegexRange(start, end, options);\n};\n\nconst rangeError = (...args) => {\n return new RangeError('Invalid range arguments: ' + util.inspect(...args));\n};\n\nconst invalidRange = (start, end, options) => {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n};\n\nconst invalidStep = (step, options) => {\n if (options.strictRanges === true) {\n throw new TypeError(`Expected step \"${step}\" to be a number`);\n }\n return [];\n};\n\nconst fillNumbers = (start, end, step = 1, options = {}) => {\n let a = Number(start);\n let b = Number(end);\n\n if (!Number.isInteger(a) || !Number.isInteger(b)) {\n if (options.strictRanges === true) throw rangeError([start, end]);\n return [];\n }\n\n // fix negative zero\n if (a === 0) a = 0;\n if (b === 0) b = 0;\n\n let descending = a > b;\n let startString = String(start);\n let endString = String(end);\n let stepString = String(step);\n step = Math.max(Math.abs(step), 1);\n\n let padded = zeros(startString) || zeros(endString) || zeros(stepString);\n let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;\n let toNumber = padded === false && stringify(start, end, options) === false;\n let format = options.transform || transform(toNumber);\n\n if (options.toRegex && step === 1) {\n return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);\n }\n\n let parts = { negatives: [], positives: [] };\n let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));\n let range = [];\n let index = 0;\n\n while (descending ? a >= b : a <= b) {\n if (options.toRegex === true && step > 1) {\n push(a);\n } else {\n range.push(pad(format(a, index), maxLen, toNumber));\n }\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return step > 1\n ? toSequence(parts, options)\n : toRegex(range, null, { wrap: false, ...options });\n }\n\n return range;\n};\n\nconst fillLetters = (start, end, step = 1, options = {}) => {\n if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {\n return invalidRange(start, end, options);\n }\n\n\n let format = options.transform || (val => String.fromCharCode(val));\n let a = `${start}`.charCodeAt(0);\n let b = `${end}`.charCodeAt(0);\n\n let descending = a > b;\n let min = Math.min(a, b);\n let max = Math.max(a, b);\n\n if (options.toRegex && step === 1) {\n return toRange(min, max, false, options);\n }\n\n let range = [];\n let index = 0;\n\n while (descending ? 
a >= b : a <= b) {\n range.push(format(a, index));\n a = descending ? a - step : a + step;\n index++;\n }\n\n if (options.toRegex === true) {\n return toRegex(range, null, { wrap: false, options });\n }\n\n return range;\n};\n\nconst fill = (start, end, step, options = {}) => {\n if (end == null && isValidValue(start)) {\n return [start];\n }\n\n if (!isValidValue(start) || !isValidValue(end)) {\n return invalidRange(start, end, options);\n }\n\n if (typeof step === 'function') {\n return fill(start, end, 1, { transform: step });\n }\n\n if (isObject(step)) {\n return fill(start, end, 0, step);\n }\n\n let opts = { ...options };\n if (opts.capture === true) opts.wrap = true;\n step = step || opts.step || 1;\n\n if (!isNumber(step)) {\n if (step != null && !isObject(step)) return invalidStep(step, opts);\n return fill(start, end, 1, step);\n }\n\n if (isNumber(start) && isNumber(end)) {\n return fillNumbers(start, end, step, opts);\n }\n\n return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);\n};\n\nmodule.exports = fill;\n","/*!\n * is-number \n *\n * Copyright (c) 2014-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nmodule.exports = function(num) {\n if (typeof num === 'number') {\n return num - num === 0;\n }\n if (typeof num === 'string' && num.trim() !== '') {\n return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);\n }\n return false;\n};\n","'use strict';\n\nconst util = require('util');\nconst braces = require('braces');\nconst picomatch = require('picomatch');\nconst utils = require('picomatch/lib/utils');\nconst isEmptyString = val => typeof val === 'string' && (val === '' || val === './');\n\n/**\n * Returns an array of strings that match one or more glob patterns.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm(list, patterns[, options]);\n *\n * console.log(mm(['a.js', 'a.txt'], ['*.js']));\n * //=> [ 'a.js' ]\n * ```\n * @param {String|Array} list List of strings to match.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} options See available [options](#options)\n * @return {Array} Returns an array of matches\n * @summary false\n * @api public\n */\n\nconst micromatch = (list, patterns, options) => {\n patterns = [].concat(patterns);\n list = [].concat(list);\n\n let omit = new Set();\n let keep = new Set();\n let items = new Set();\n let negatives = 0;\n\n let onResult = state => {\n items.add(state.output);\n if (options && options.onResult) {\n options.onResult(state);\n }\n };\n\n for (let i = 0; i < patterns.length; i++) {\n let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);\n let negated = isMatch.state.negated || isMatch.state.negatedExtglob;\n if (negated) negatives++;\n\n for (let item of list) {\n let matched = isMatch(item, true);\n\n let match = negated ? !matched.isMatch : matched.isMatch;\n if (!match) continue;\n\n if (negated) {\n omit.add(matched.output);\n } else {\n omit.delete(matched.output);\n keep.add(matched.output);\n }\n }\n }\n\n let result = negatives === patterns.length ? [...items] : [...keep];\n let matches = result.filter(item => !omit.has(item));\n\n if (options && matches.length === 0) {\n if (options.failglob === true) {\n throw new Error(`No matches found for \"${patterns.join(', ')}\"`);\n }\n\n if (options.nonull === true || options.nullglob === true) {\n return options.unescape ? 
patterns.map(p => p.replace(/\\\\/g, '')) : patterns;\n }\n }\n\n return matches;\n};\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.match = micromatch;\n\n/**\n * Returns a matcher function from the given glob `pattern` and `options`.\n * The returned function takes a string to match as its only argument and returns\n * true if the string is a match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matcher(pattern[, options]);\n *\n * const isMatch = mm.matcher('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @param {String} `pattern` Glob pattern\n * @param {Object} `options`\n * @return {Function} Returns a matcher function.\n * @api public\n */\n\nmicromatch.matcher = (pattern, options) => picomatch(pattern, options);\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.isMatch(string, patterns[, options]);\n *\n * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(mm.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Backwards compatibility\n */\n\nmicromatch.any = micromatch.isMatch;\n\n/**\n * Returns a list of strings that _**do not match any**_ of the given `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.not(list, patterns[, options]);\n *\n * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));\n * //=> ['b.b', 'c.c']\n * ```\n * @param {Array} `list` Array of strings to match.\n * @param {String|Array} `patterns` One or more glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Array} Returns an array of strings that **do not match** the given patterns.\n * @api public\n */\n\nmicromatch.not = (list, patterns, options = {}) => {\n patterns = [].concat(patterns).map(String);\n let result = new Set();\n let items = [];\n\n let onResult = state => {\n if (options.onResult) options.onResult(state);\n items.push(state.output);\n };\n\n let matches = micromatch(list, patterns, { ...options, onResult });\n\n for (let item of items) {\n if (!matches.includes(item)) {\n result.add(item);\n }\n }\n return [...result];\n};\n\n/**\n * Returns true if the given `string` contains the given pattern. 
Similar\n * to [.isMatch](#isMatch) but the pattern can match any part of the string.\n *\n * ```js\n * var mm = require('micromatch');\n * // mm.contains(string, pattern[, options]);\n *\n * console.log(mm.contains('aa/bb/cc', '*b'));\n * //=> true\n * console.log(mm.contains('aa/bb/cc', '*d'));\n * //=> false\n * ```\n * @param {String} `str` The string to match.\n * @param {String|Array} `patterns` Glob pattern to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if the patter matches any part of `str`.\n * @api public\n */\n\nmicromatch.contains = (str, pattern, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n if (Array.isArray(pattern)) {\n return pattern.some(p => micromatch.contains(str, p, options));\n }\n\n if (typeof pattern === 'string') {\n if (isEmptyString(str) || isEmptyString(pattern)) {\n return false;\n }\n\n if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {\n return true;\n }\n }\n\n return micromatch.isMatch(str, pattern, { ...options, contains: true });\n};\n\n/**\n * Filter the keys of the given object with the given `glob` pattern\n * and `options`. Does not attempt to match nested keys. If you need this feature,\n * use [glob-object][] instead.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.matchKeys(object, patterns[, options]);\n *\n * const obj = { aa: 'a', ab: 'b', ac: 'c' };\n * console.log(mm.matchKeys(obj, '*b'));\n * //=> { ab: 'b' }\n * ```\n * @param {Object} `object` The object with keys to filter.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Object} Returns an object with only keys that match the given patterns.\n * @api public\n */\n\nmicromatch.matchKeys = (obj, patterns, options) => {\n if (!utils.isObject(obj)) {\n throw new TypeError('Expected the first argument to be an object');\n }\n let keys = micromatch(Object.keys(obj), patterns, options);\n let res = {};\n for (let key of keys) res[key] = obj[key];\n return res;\n};\n\n/**\n * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.some(list, patterns[, options]);\n *\n * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // true\n * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test. 
Returns as soon as the first match is found.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.some = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (items.some(item => isMatch(item))) {\n return true;\n }\n }\n return false;\n};\n\n/**\n * Returns true if every string in the given `list` matches\n * any of the given glob `patterns`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.every(list, patterns[, options]);\n *\n * console.log(mm.every('foo.js', ['foo.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));\n * // true\n * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));\n * // false\n * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));\n * // false\n * ```\n * @param {String|Array} `list` The string or array of strings to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.every = (list, patterns, options) => {\n let items = [].concat(list);\n\n for (let pattern of [].concat(patterns)) {\n let isMatch = picomatch(String(pattern), options);\n if (!items.every(item => isMatch(item))) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Returns true if **all** of the given `patterns` match\n * the specified string.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.all(string, patterns[, options]);\n *\n * console.log(mm.all('foo.js', ['foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', '!foo.js']));\n * // false\n *\n * console.log(mm.all('foo.js', ['*.js', 'foo.js']));\n * // true\n *\n * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));\n * // true\n * ```\n * @param {String|Array} `str` The string to test.\n * @param {String|Array} `patterns` One or more glob patterns to use for matching.\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\nmicromatch.all = (str, patterns, options) => {\n if (typeof str !== 'string') {\n throw new TypeError(`Expected a string: \"${util.inspect(str)}\"`);\n }\n\n return [].concat(patterns).every(p => picomatch(p, options)(str));\n};\n\n/**\n * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.capture(pattern, string[, options]);\n *\n * console.log(mm.capture('test/*.js', 'test/foo.js'));\n * //=> ['foo']\n * console.log(mm.capture('test/*.js', 'foo/bar.css'));\n * //=> null\n * ```\n * @param {String} `glob` Glob pattern to use for matching.\n * @param {String} `input` String to match\n * @param {Object} `options` See available [options](#options) for changing how matches are performed\n * @return {Boolean} Returns an array of captures if the input matches the glob pattern, otherwise `null`.\n * @api public\n */\n\nmicromatch.capture = (glob, input, options) => {\n let posix = utils.isWindows(options);\n let regex = 
picomatch.makeRe(String(glob), { ...options, capture: true });\n let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);\n\n if (match) {\n return match.slice(1).map(v => v === void 0 ? '' : v);\n }\n};\n\n/**\n * Create a regular expression from the given glob `pattern`.\n *\n * ```js\n * const mm = require('micromatch');\n * // mm.makeRe(pattern[, options]);\n *\n * console.log(mm.makeRe('*.js'));\n * //=> /^(?:(\\.[\\\\\\/])?(?!\\.)(?=.)[^\\/]*?\\.js)$/\n * ```\n * @param {String} `pattern` A glob pattern to convert to regex.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\nmicromatch.makeRe = (...args) => picomatch.makeRe(...args);\n\n/**\n * Scan a glob pattern to separate the pattern into segments. Used\n * by the [split](#split) method.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm.scan(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\nmicromatch.scan = (...args) => picomatch.scan(...args);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const mm = require('micromatch');\n * const state = mm(pattern[, options]);\n * ```\n * @param {String} `glob`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as regex source string.\n * @api public\n */\n\nmicromatch.parse = (patterns, options) => {\n let res = [];\n for (let pattern of [].concat(patterns || [])) {\n for (let str of braces(String(pattern), options)) {\n res.push(picomatch.parse(str, options));\n }\n }\n return res;\n};\n\n/**\n * Process the given brace `pattern`.\n *\n * ```js\n * const { braces } = require('micromatch');\n * console.log(braces('foo/{a,b,c}/bar'));\n * //=> [ 'foo/(a|b|c)/bar' ]\n *\n * console.log(braces('foo/{a,b,c}/bar', { expand: true }));\n * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]\n * ```\n * @param {String} `pattern` String with brace pattern to process.\n * @param {Object} `options` Any [options](#options) to change how expansion is performed. 
See the [braces][] library for all available options.\n * @return {Array}\n * @api public\n */\n\nmicromatch.braces = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n if ((options && options.nobrace === true) || !/\\{.*\\}/.test(pattern)) {\n return [pattern];\n }\n return braces(pattern, options);\n};\n\n/**\n * Expand braces\n */\n\nmicromatch.braceExpand = (pattern, options) => {\n if (typeof pattern !== 'string') throw new TypeError('Expected a string');\n return micromatch.braces(pattern, { ...options, expand: true });\n};\n\n/**\n * Expose micromatch\n */\n\nmodule.exports = micromatch;\n","/*!\n * to-regex-range \n *\n * Copyright (c) 2015-present, Jon Schlinkert.\n * Released under the MIT License.\n */\n\n'use strict';\n\nconst isNumber = require('is-number');\n\nconst toRegexRange = (min, max, options) => {\n if (isNumber(min) === false) {\n throw new TypeError('toRegexRange: expected the first argument to be a number');\n }\n\n if (max === void 0 || min === max) {\n return String(min);\n }\n\n if (isNumber(max) === false) {\n throw new TypeError('toRegexRange: expected the second argument to be a number.');\n }\n\n let opts = { relaxZeros: true, ...options };\n if (typeof opts.strictZeros === 'boolean') {\n opts.relaxZeros = opts.strictZeros === false;\n }\n\n let relax = String(opts.relaxZeros);\n let shorthand = String(opts.shorthand);\n let capture = String(opts.capture);\n let wrap = String(opts.wrap);\n let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;\n\n if (toRegexRange.cache.hasOwnProperty(cacheKey)) {\n return toRegexRange.cache[cacheKey].result;\n }\n\n let a = Math.min(min, max);\n let b = Math.max(min, max);\n\n if (Math.abs(a - b) === 1) {\n let result = min + '|' + max;\n if (opts.capture) {\n return `(${result})`;\n }\n if (opts.wrap === false) {\n return result;\n }\n return `(?:${result})`;\n }\n\n let isPadded = hasPadding(min) || hasPadding(max);\n let state = { min, max, a, b };\n let positives = [];\n let negatives = [];\n\n if (isPadded) {\n state.isPadded = isPadded;\n state.maxLen = String(state.max).length;\n }\n\n if (a < 0) {\n let newMin = b < 0 ? 
Math.abs(b) : 1;\n negatives = splitToPatterns(newMin, Math.abs(a), state, opts);\n a = state.a = 0;\n }\n\n if (b >= 0) {\n positives = splitToPatterns(a, b, state, opts);\n }\n\n state.negatives = negatives;\n state.positives = positives;\n state.result = collatePatterns(negatives, positives, opts);\n\n if (opts.capture === true) {\n state.result = `(${state.result})`;\n } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {\n state.result = `(?:${state.result})`;\n }\n\n toRegexRange.cache[cacheKey] = state;\n return state.result;\n};\n\nfunction collatePatterns(neg, pos, options) {\n let onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];\n let onlyPositive = filterPatterns(pos, neg, '', false, options) || [];\n let intersected = filterPatterns(neg, pos, '-?', true, options) || [];\n let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive);\n return subpatterns.join('|');\n}\n\nfunction splitToRanges(min, max) {\n let nines = 1;\n let zeros = 1;\n\n let stop = countNines(min, nines);\n let stops = new Set([max]);\n\n while (min <= stop && stop <= max) {\n stops.add(stop);\n nines += 1;\n stop = countNines(min, nines);\n }\n\n stop = countZeros(max + 1, zeros) - 1;\n\n while (min < stop && stop <= max) {\n stops.add(stop);\n zeros += 1;\n stop = countZeros(max + 1, zeros) - 1;\n }\n\n stops = [...stops];\n stops.sort(compare);\n return stops;\n}\n\n/**\n * Convert a range to a regex pattern\n * @param {Number} `start`\n * @param {Number} `stop`\n * @return {String}\n */\n\nfunction rangeToPattern(start, stop, options) {\n if (start === stop) {\n return { pattern: start, count: [], digits: 0 };\n }\n\n let zipped = zip(start, stop);\n let digits = zipped.length;\n let pattern = '';\n let count = 0;\n\n for (let i = 0; i < digits; i++) {\n let [startDigit, stopDigit] = zipped[i];\n\n if (startDigit === stopDigit) {\n pattern += startDigit;\n\n } else if (startDigit !== '0' || stopDigit !== '9') {\n pattern += toCharacterClass(startDigit, stopDigit, options);\n\n } else {\n count++;\n }\n }\n\n if (count) {\n pattern += options.shorthand === true ? 
'\\\\d' : '[0-9]';\n }\n\n return { pattern, count: [count], digits };\n}\n\nfunction splitToPatterns(min, max, tok, options) {\n let ranges = splitToRanges(min, max);\n let tokens = [];\n let start = min;\n let prev;\n\n for (let i = 0; i < ranges.length; i++) {\n let max = ranges[i];\n let obj = rangeToPattern(String(start), String(max), options);\n let zeros = '';\n\n if (!tok.isPadded && prev && prev.pattern === obj.pattern) {\n if (prev.count.length > 1) {\n prev.count.pop();\n }\n\n prev.count.push(obj.count[0]);\n prev.string = prev.pattern + toQuantifier(prev.count);\n start = max + 1;\n continue;\n }\n\n if (tok.isPadded) {\n zeros = padZeros(max, tok, options);\n }\n\n obj.string = zeros + obj.pattern + toQuantifier(obj.count);\n tokens.push(obj);\n start = max + 1;\n prev = obj;\n }\n\n return tokens;\n}\n\nfunction filterPatterns(arr, comparison, prefix, intersection, options) {\n let result = [];\n\n for (let ele of arr) {\n let { string } = ele;\n\n // only push if _both_ are negative...\n if (!intersection && !contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n\n // or _both_ are positive\n if (intersection && contains(comparison, 'string', string)) {\n result.push(prefix + string);\n }\n }\n return result;\n}\n\n/**\n * Zip strings\n */\n\nfunction zip(a, b) {\n let arr = [];\n for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);\n return arr;\n}\n\nfunction compare(a, b) {\n return a > b ? 1 : b > a ? -1 : 0;\n}\n\nfunction contains(arr, key, val) {\n return arr.some(ele => ele[key] === val);\n}\n\nfunction countNines(min, len) {\n return Number(String(min).slice(0, -len) + '9'.repeat(len));\n}\n\nfunction countZeros(integer, zeros) {\n return integer - (integer % Math.pow(10, zeros));\n}\n\nfunction toQuantifier(digits) {\n let [start = 0, stop = ''] = digits;\n if (stop || start > 1) {\n return `{${start + (stop ? ',' + stop : '')}}`;\n }\n return '';\n}\n\nfunction toCharacterClass(a, b, options) {\n return `[${a}${(b - a === 1) ? '' : '-'}${b}]`;\n}\n\nfunction hasPadding(str) {\n return /^-?(0+)\\d/.test(str);\n}\n\nfunction padZeros(value, tok, options) {\n if (!tok.isPadded) {\n return value;\n }\n\n let diff = Math.abs(tok.maxLen - String(value).length);\n let relax = options.relaxZeros !== false;\n\n switch (diff) {\n case 0:\n return '';\n case 1:\n return relax ? '0?' : '0';\n case 2:\n return relax ? '0{0,2}' : '00';\n default: {\n return relax ? 
`0{0,${diff}}` : `0{${diff}}`;\n }\n }\n}\n\n/**\n * Cache\n */\n\ntoRegexRange.cache = {};\ntoRegexRange.clearCache = () => (toRegexRange.cache = {});\n\n/**\n * Expose `toRegexRange`\n */\n\nmodule.exports = toRegexRange;\n","\"use strict\";\r\nconst taskManager = require(\"./managers/tasks\");\r\nconst async_1 = require(\"./providers/async\");\r\nconst stream_1 = require(\"./providers/stream\");\r\nconst sync_1 = require(\"./providers/sync\");\r\nconst settings_1 = require(\"./settings\");\r\nconst utils = require(\"./utils\");\r\nasync function FastGlob(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, async_1.default, options);\r\n const result = await Promise.all(works);\r\n return utils.array.flatten(result);\r\n}\r\n// https://github.com/typescript-eslint/typescript-eslint/issues/60\r\n// eslint-disable-next-line no-redeclare\r\n(function (FastGlob) {\r\n function sync(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, sync_1.default, options);\r\n return utils.array.flatten(works);\r\n }\r\n FastGlob.sync = sync;\r\n function stream(source, options) {\r\n assertPatternsInput(source);\r\n const works = getWorks(source, stream_1.default, options);\r\n /**\r\n * The stream returned by the provider cannot work with an asynchronous iterator.\r\n * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.\r\n * This affects performance (+25%). I don't see best solution right now.\r\n */\r\n return utils.stream.merge(works);\r\n }\r\n FastGlob.stream = stream;\r\n function generateTasks(source, options) {\r\n assertPatternsInput(source);\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n return taskManager.generate(patterns, settings);\r\n }\r\n FastGlob.generateTasks = generateTasks;\r\n function isDynamicPattern(source, options) {\r\n assertPatternsInput(source);\r\n const settings = new settings_1.default(options);\r\n return utils.pattern.isDynamicPattern(source, settings);\r\n }\r\n FastGlob.isDynamicPattern = isDynamicPattern;\r\n function escapePath(source) {\r\n assertPatternsInput(source);\r\n return utils.path.escape(source);\r\n }\r\n FastGlob.escapePath = escapePath;\r\n})(FastGlob || (FastGlob = {}));\r\nfunction getWorks(source, _Provider, options) {\r\n const patterns = [].concat(source);\r\n const settings = new settings_1.default(options);\r\n const tasks = taskManager.generate(patterns, settings);\r\n const provider = new _Provider(settings);\r\n return tasks.map(provider.read, provider);\r\n}\r\nfunction assertPatternsInput(input) {\r\n const source = [].concat(input);\r\n const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));\r\n if (!isValidSource) {\r\n throw new TypeError('Patterns must be a string (non empty) or an array of strings');\r\n }\r\n}\r\nmodule.exports = FastGlob;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;\r\nconst utils = require(\"../utils\");\r\nfunction generate(patterns, settings) {\r\n const positivePatterns = getPositivePatterns(patterns);\r\n const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);\r\n const staticPatterns = 
positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));\r\n const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));\r\n const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);\r\n const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);\r\n return staticTasks.concat(dynamicTasks);\r\n}\r\nexports.generate = generate;\r\nfunction convertPatternsToTasks(positive, negative, dynamic) {\r\n const positivePatternsGroup = groupPatternsByBaseDirectory(positive);\r\n // When we have a global group – there is no reason to divide the patterns into independent tasks.\r\n // In this case, the global task covers the rest.\r\n if ('.' in positivePatternsGroup) {\r\n const task = convertPatternGroupToTask('.', positive, negative, dynamic);\r\n return [task];\r\n }\r\n return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);\r\n}\r\nexports.convertPatternsToTasks = convertPatternsToTasks;\r\nfunction getPositivePatterns(patterns) {\r\n return utils.pattern.getPositivePatterns(patterns);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getNegativePatternsAsPositive(patterns, ignore) {\r\n const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);\r\n const positive = negative.map(utils.pattern.convertToPositivePattern);\r\n return positive;\r\n}\r\nexports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;\r\nfunction groupPatternsByBaseDirectory(patterns) {\r\n const group = {};\r\n return patterns.reduce((collection, pattern) => {\r\n const base = utils.pattern.getBaseDirectory(pattern);\r\n if (base in collection) {\r\n collection[base].push(pattern);\r\n }\r\n else {\r\n collection[base] = [pattern];\r\n }\r\n return collection;\r\n }, group);\r\n}\r\nexports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;\r\nfunction convertPatternGroupsToTasks(positive, negative, dynamic) {\r\n return Object.keys(positive).map((base) => {\r\n return convertPatternGroupToTask(base, positive[base], negative, dynamic);\r\n });\r\n}\r\nexports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;\r\nfunction convertPatternGroupToTask(base, positive, negative, dynamic) {\r\n return {\r\n dynamic,\r\n positive,\r\n negative,\r\n base,\r\n patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))\r\n };\r\n}\r\nexports.convertPatternGroupToTask = convertPatternGroupToTask;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderAsync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = [];\r\n return new Promise((resolve, reject) => {\r\n const stream = this.api(root, task, options);\r\n stream.once('error', reject);\r\n stream.on('data', (entry) => entries.push(options.transform(entry)));\r\n stream.once('end', () => resolve(entries));\r\n });\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderAsync;\r\n","\"use 
strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nconst partial_1 = require(\"../matchers/partial\");\r\nclass DeepFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n }\r\n getFilter(basePath, positive, negative) {\r\n const matcher = this._getMatcher(positive);\r\n const negativeRe = this._getNegativePatternsRe(negative);\r\n return (entry) => this._filter(basePath, entry, matcher, negativeRe);\r\n }\r\n _getMatcher(patterns) {\r\n return new partial_1.default(patterns, this._settings, this._micromatchOptions);\r\n }\r\n _getNegativePatternsRe(patterns) {\r\n const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);\r\n return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);\r\n }\r\n _filter(basePath, entry, matcher, negativeRe) {\r\n if (this._isSkippedByDeep(basePath, entry.path)) {\r\n return false;\r\n }\r\n if (this._isSkippedSymbolicLink(entry)) {\r\n return false;\r\n }\r\n const filepath = utils.path.removeLeadingDotSegment(entry.path);\r\n if (this._isSkippedByPositivePatterns(filepath, matcher)) {\r\n return false;\r\n }\r\n return this._isSkippedByNegativePatterns(filepath, negativeRe);\r\n }\r\n _isSkippedByDeep(basePath, entryPath) {\r\n /**\r\n * Avoid unnecessary depth calculations when it doesn't matter.\r\n */\r\n if (this._settings.deep === Infinity) {\r\n return false;\r\n }\r\n return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;\r\n }\r\n _getEntryLevel(basePath, entryPath) {\r\n const entryPathDepth = entryPath.split('/').length;\r\n if (basePath === '') {\r\n return entryPathDepth;\r\n }\r\n const basePathDepth = basePath.split('/').length;\r\n return entryPathDepth - basePathDepth;\r\n }\r\n _isSkippedSymbolicLink(entry) {\r\n return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();\r\n }\r\n _isSkippedByPositivePatterns(entryPath, matcher) {\r\n return !this._settings.baseNameMatch && !matcher.match(entryPath);\r\n }\r\n _isSkippedByNegativePatterns(entryPath, patternsRe) {\r\n return !utils.pattern.matchAny(entryPath, patternsRe);\r\n }\r\n}\r\nexports.default = DeepFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryFilter {\r\n constructor(_settings, _micromatchOptions) {\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this.index = new Map();\r\n }\r\n getFilter(positive, negative) {\r\n const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);\r\n const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);\r\n return (entry) => this._filter(entry, positiveRe, negativeRe);\r\n }\r\n _filter(entry, positiveRe, negativeRe) {\r\n if (this._settings.unique && this._isDuplicateEntry(entry)) {\r\n return false;\r\n }\r\n if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {\r\n return false;\r\n }\r\n if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {\r\n return false;\r\n }\r\n const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path;\r\n const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);\r\n if (this._settings.unique && isMatched) {\r\n this._createIndexRecord(entry);\r\n }\r\n return isMatched;\r\n }\r\n _isDuplicateEntry(entry) {\r\n return this.index.has(entry.path);\r\n }\r\n _createIndexRecord(entry) {\r\n this.index.set(entry.path, undefined);\r\n }\r\n _onlyFileFilter(entry) {\r\n return this._settings.onlyFiles && !entry.dirent.isFile();\r\n }\r\n _onlyDirectoryFilter(entry) {\r\n return this._settings.onlyDirectories && !entry.dirent.isDirectory();\r\n }\r\n _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {\r\n if (!this._settings.absolute) {\r\n return false;\r\n }\r\n const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);\r\n return utils.pattern.matchAny(fullpath, patternsRe);\r\n }\r\n _isMatchToPatterns(entryPath, patternsRe) {\r\n const filepath = utils.path.removeLeadingDotSegment(entryPath);\r\n return utils.pattern.matchAny(filepath, patternsRe);\r\n }\r\n}\r\nexports.default = EntryFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass ErrorFilter {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getFilter() {\r\n return (error) => this._isNonFatalError(error);\r\n }\r\n _isNonFatalError(error) {\r\n return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = ErrorFilter;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass Matcher {\r\n constructor(_patterns, _settings, _micromatchOptions) {\r\n this._patterns = _patterns;\r\n this._settings = _settings;\r\n this._micromatchOptions = _micromatchOptions;\r\n this._storage = [];\r\n this._fillStorage();\r\n }\r\n _fillStorage() {\r\n /**\r\n * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).\r\n * So, before expand patterns with brace expansion into separated patterns.\r\n */\r\n const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);\r\n for (const pattern of patterns) {\r\n const segments = this._getPatternSegments(pattern);\r\n const sections = this._splitSegmentsIntoSections(segments);\r\n this._storage.push({\r\n complete: sections.length <= 1,\r\n pattern,\r\n segments,\r\n sections\r\n });\r\n }\r\n }\r\n _getPatternSegments(pattern) {\r\n const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);\r\n return parts.map((part) => {\r\n const dynamic = utils.pattern.isDynamicPattern(part, this._settings);\r\n if (!dynamic) {\r\n return {\r\n dynamic: false,\r\n pattern: part\r\n };\r\n }\r\n return {\r\n dynamic: true,\r\n pattern: part,\r\n patternRe: utils.pattern.makeRe(part, this._micromatchOptions)\r\n };\r\n });\r\n }\r\n _splitSegmentsIntoSections(segments) {\r\n return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));\r\n }\r\n}\r\nexports.default = Matcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst matcher_1 = require(\"./matcher\");\r\nclass PartialMatcher extends matcher_1.default {\r\n match(filepath) {\r\n const parts = filepath.split('/');\r\n const levels = parts.length;\r\n const patterns = this._storage.filter((info) => !info.complete || 
info.segments.length > levels);\r\n for (const pattern of patterns) {\r\n const section = pattern.sections[0];\r\n /**\r\n * In this case, the pattern has a globstar and we must read all directories unconditionally,\r\n * but only if the level has reached the end of the first group.\r\n *\r\n * fixtures/{a,b}/**\r\n * ^ true/false ^ always true\r\n */\r\n if (!pattern.complete && levels > section.length) {\r\n return true;\r\n }\r\n const match = parts.every((part, index) => {\r\n const segment = pattern.segments[index];\r\n if (segment.dynamic && segment.patternRe.test(part)) {\r\n return true;\r\n }\r\n if (!segment.dynamic && segment.pattern === part) {\r\n return true;\r\n }\r\n return false;\r\n });\r\n if (match) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n}\r\nexports.default = PartialMatcher;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst deep_1 = require(\"./filters/deep\");\r\nconst entry_1 = require(\"./filters/entry\");\r\nconst error_1 = require(\"./filters/error\");\r\nconst entry_2 = require(\"./transformers/entry\");\r\nclass Provider {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this.errorFilter = new error_1.default(this._settings);\r\n this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());\r\n this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());\r\n this.entryTransformer = new entry_2.default(this._settings);\r\n }\r\n _getRootDirectory(task) {\r\n return path.resolve(this._settings.cwd, task.base);\r\n }\r\n _getReaderOptions(task) {\r\n const basePath = task.base === '.' ? '' : task.base;\r\n return {\r\n basePath,\r\n pathSegmentSeparator: '/',\r\n concurrency: this._settings.concurrency,\r\n deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),\r\n entryFilter: this.entryFilter.getFilter(task.positive, task.negative),\r\n errorFilter: this.errorFilter.getFilter(),\r\n followSymbolicLinks: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n stats: this._settings.stats,\r\n throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,\r\n transform: this.entryTransformer.getTransformer()\r\n };\r\n }\r\n _getMicromatchOptions() {\r\n return {\r\n dot: this._settings.dot,\r\n matchBase: this._settings.baseNameMatch,\r\n nobrace: !this._settings.braceExpansion,\r\n nocase: !this._settings.caseSensitiveMatch,\r\n noext: !this._settings.extglob,\r\n noglobstar: !this._settings.globstar,\r\n posix: true,\r\n strictSlashes: false\r\n };\r\n }\r\n}\r\nexports.default = Provider;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst stream_2 = require(\"../readers/stream\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderStream extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new stream_2.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const source = this.api(root, task, options);\r\n const destination = new stream_1.Readable({ objectMode: true, read: () => { } });\r\n source\r\n .once('error', (error) => destination.emit('error', error))\r\n .on('data', (entry) => destination.emit('data', options.transform(entry)))\r\n .once('end', () => destination.emit('end'));\r\n destination\r\n 
.once('close', () => source.destroy());\r\n return destination;\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst sync_1 = require(\"../readers/sync\");\r\nconst provider_1 = require(\"./provider\");\r\nclass ProviderSync extends provider_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._reader = new sync_1.default(this._settings);\r\n }\r\n read(task) {\r\n const root = this._getRootDirectory(task);\r\n const options = this._getReaderOptions(task);\r\n const entries = this.api(root, task, options);\r\n return entries.map(options.transform);\r\n }\r\n api(root, task, options) {\r\n if (task.dynamic) {\r\n return this._reader.dynamic(root, options);\r\n }\r\n return this._reader.static(task.patterns, options);\r\n }\r\n}\r\nexports.default = ProviderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst utils = require(\"../../utils\");\r\nclass EntryTransformer {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n }\r\n getTransformer() {\r\n return (entry) => this._transform(entry);\r\n }\r\n _transform(entry) {\r\n let filepath = entry.path;\r\n if (this._settings.absolute) {\r\n filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);\r\n filepath = utils.path.unixify(filepath);\r\n }\r\n if (this._settings.markDirectories && entry.dirent.isDirectory()) {\r\n filepath += '/';\r\n }\r\n if (!this._settings.objectMode) {\r\n return filepath;\r\n }\r\n return Object.assign(Object.assign({}, entry), { path: filepath });\r\n }\r\n}\r\nexports.default = EntryTransformer;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst path = require(\"path\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst utils = require(\"../utils\");\r\nclass Reader {\r\n constructor(_settings) {\r\n this._settings = _settings;\r\n this._fsStatSettings = new fsStat.Settings({\r\n followSymbolicLink: this._settings.followSymbolicLinks,\r\n fs: this._settings.fs,\r\n throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks\r\n });\r\n }\r\n _getFullEntryPath(filepath) {\r\n return path.resolve(this._settings.cwd, filepath);\r\n }\r\n _makeEntry(stats, pattern) {\r\n const entry = {\r\n name: pattern,\r\n path: pattern,\r\n dirent: utils.fs.createDirentFromStats(pattern, stats)\r\n };\r\n if (this._settings.stats) {\r\n entry.stats = stats;\r\n }\r\n return entry;\r\n }\r\n _isFatalError(error) {\r\n return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;\r\n }\r\n}\r\nexports.default = Reader;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst stream_1 = require(\"stream\");\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderStream extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkStream = fsWalk.walkStream;\r\n this._stat = fsStat.stat;\r\n }\r\n dynamic(root, options) {\r\n return this._walkStream(root, options);\r\n }\r\n static(patterns, options) {\r\n const filepaths = patterns.map(this._getFullEntryPath, this);\r\n const stream = new stream_1.PassThrough({ objectMode: true });\r\n 
stream._write = (index, _enc, done) => {\r\n return this._getEntry(filepaths[index], patterns[index], options)\r\n .then((entry) => {\r\n if (entry !== null && options.entryFilter(entry)) {\r\n stream.push(entry);\r\n }\r\n if (index === filepaths.length - 1) {\r\n stream.end();\r\n }\r\n done();\r\n })\r\n .catch(done);\r\n };\r\n for (let i = 0; i < filepaths.length; i++) {\r\n stream.write(i);\r\n }\r\n return stream;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n return this._getStat(filepath)\r\n .then((stats) => this._makeEntry(stats, pattern))\r\n .catch((error) => {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n });\r\n }\r\n _getStat(filepath) {\r\n return new Promise((resolve, reject) => {\r\n this._stat(filepath, this._fsStatSettings, (error, stats) => {\r\n return error === null ? resolve(stats) : reject(error);\r\n });\r\n });\r\n }\r\n}\r\nexports.default = ReaderStream;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst fsStat = require(\"@nodelib/fs.stat\");\r\nconst fsWalk = require(\"@nodelib/fs.walk\");\r\nconst reader_1 = require(\"./reader\");\r\nclass ReaderSync extends reader_1.default {\r\n constructor() {\r\n super(...arguments);\r\n this._walkSync = fsWalk.walkSync;\r\n this._statSync = fsStat.statSync;\r\n }\r\n dynamic(root, options) {\r\n return this._walkSync(root, options);\r\n }\r\n static(patterns, options) {\r\n const entries = [];\r\n for (const pattern of patterns) {\r\n const filepath = this._getFullEntryPath(pattern);\r\n const entry = this._getEntry(filepath, pattern, options);\r\n if (entry === null || !options.entryFilter(entry)) {\r\n continue;\r\n }\r\n entries.push(entry);\r\n }\r\n return entries;\r\n }\r\n _getEntry(filepath, pattern, options) {\r\n try {\r\n const stats = this._getStat(filepath);\r\n return this._makeEntry(stats, pattern);\r\n }\r\n catch (error) {\r\n if (options.errorFilter(error)) {\r\n return null;\r\n }\r\n throw error;\r\n }\r\n }\r\n _getStat(filepath) {\r\n return this._statSync(filepath, this._fsStatSettings);\r\n }\r\n}\r\nexports.default = ReaderSync;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;\r\nconst fs = require(\"fs\");\r\nconst os = require(\"os\");\r\n/**\r\n * The `os.cpus` method can return zero. 
We expect the number of cores to be greater than zero.\r\n * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107\r\n */\r\nconst CPU_COUNT = Math.max(os.cpus().length, 1);\r\nexports.DEFAULT_FILE_SYSTEM_ADAPTER = {\r\n lstat: fs.lstat,\r\n lstatSync: fs.lstatSync,\r\n stat: fs.stat,\r\n statSync: fs.statSync,\r\n readdir: fs.readdir,\r\n readdirSync: fs.readdirSync\r\n};\r\nclass Settings {\r\n constructor(_options = {}) {\r\n this._options = _options;\r\n this.absolute = this._getValue(this._options.absolute, false);\r\n this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);\r\n this.braceExpansion = this._getValue(this._options.braceExpansion, true);\r\n this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);\r\n this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);\r\n this.cwd = this._getValue(this._options.cwd, process.cwd());\r\n this.deep = this._getValue(this._options.deep, Infinity);\r\n this.dot = this._getValue(this._options.dot, false);\r\n this.extglob = this._getValue(this._options.extglob, true);\r\n this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);\r\n this.fs = this._getFileSystemMethods(this._options.fs);\r\n this.globstar = this._getValue(this._options.globstar, true);\r\n this.ignore = this._getValue(this._options.ignore, []);\r\n this.markDirectories = this._getValue(this._options.markDirectories, false);\r\n this.objectMode = this._getValue(this._options.objectMode, false);\r\n this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);\r\n this.onlyFiles = this._getValue(this._options.onlyFiles, true);\r\n this.stats = this._getValue(this._options.stats, false);\r\n this.suppressErrors = this._getValue(this._options.suppressErrors, false);\r\n this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);\r\n this.unique = this._getValue(this._options.unique, true);\r\n if (this.onlyDirectories) {\r\n this.onlyFiles = false;\r\n }\r\n if (this.stats) {\r\n this.objectMode = true;\r\n }\r\n }\r\n _getValue(option, value) {\r\n return option === undefined ? 
value : option;\r\n }\r\n _getFileSystemMethods(methods = {}) {\r\n return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);\r\n }\r\n}\r\nexports.default = Settings;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.splitWhen = exports.flatten = void 0;\r\nfunction flatten(items) {\r\n return items.reduce((collection, item) => [].concat(collection, item), []);\r\n}\r\nexports.flatten = flatten;\r\nfunction splitWhen(items, predicate) {\r\n const result = [[]];\r\n let groupIndex = 0;\r\n for (const item of items) {\r\n if (predicate(item)) {\r\n groupIndex++;\r\n result[groupIndex] = [];\r\n }\r\n else {\r\n result[groupIndex].push(item);\r\n }\r\n }\r\n return result;\r\n}\r\nexports.splitWhen = splitWhen;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEnoentCodeError = void 0;\r\nfunction isEnoentCodeError(error) {\r\n return error.code === 'ENOENT';\r\n}\r\nexports.isEnoentCodeError = isEnoentCodeError;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.createDirentFromStats = void 0;\r\nclass DirentFromStats {\r\n constructor(name, stats) {\r\n this.name = name;\r\n this.isBlockDevice = stats.isBlockDevice.bind(stats);\r\n this.isCharacterDevice = stats.isCharacterDevice.bind(stats);\r\n this.isDirectory = stats.isDirectory.bind(stats);\r\n this.isFIFO = stats.isFIFO.bind(stats);\r\n this.isFile = stats.isFile.bind(stats);\r\n this.isSocket = stats.isSocket.bind(stats);\r\n this.isSymbolicLink = stats.isSymbolicLink.bind(stats);\r\n }\r\n}\r\nfunction createDirentFromStats(name, stats) {\r\n return new DirentFromStats(name, stats);\r\n}\r\nexports.createDirentFromStats = createDirentFromStats;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;\r\nconst array = require(\"./array\");\r\nexports.array = array;\r\nconst errno = require(\"./errno\");\r\nexports.errno = errno;\r\nconst fs = require(\"./fs\");\r\nexports.fs = fs;\r\nconst path = require(\"./path\");\r\nexports.path = path;\r\nconst pattern = require(\"./pattern\");\r\nexports.pattern = pattern;\r\nconst stream = require(\"./stream\");\r\nexports.stream = stream;\r\nconst string = require(\"./string\");\r\nexports.string = string;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;\r\nconst path = require(\"path\");\r\nconst LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\\\\r\nconst UNESCAPED_GLOB_SYMBOLS_RE = /(\\\\?)([()*?[\\]{|}]|^!|[!+@](?=\\())/g;\r\n/**\r\n * Designed to work only with simple paths: `dir\\\\file`.\r\n */\r\nfunction unixify(filepath) {\r\n return filepath.replace(/\\\\/g, '/');\r\n}\r\nexports.unixify = unixify;\r\nfunction makeAbsolute(cwd, filepath) {\r\n return path.resolve(cwd, filepath);\r\n}\r\nexports.makeAbsolute = makeAbsolute;\r\nfunction escape(pattern) {\r\n return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\\\$2');\r\n}\r\nexports.escape = escape;\r\nfunction removeLeadingDotSegment(entry) {\r\n // We do not use `startsWith` because this is 10x slower than current implementation for some cases.\r\n // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with\r\n if (entry.charAt(0) 
=== '.') {\r\n const secondCharactery = entry.charAt(1);\r\n if (secondCharactery === '/' || secondCharactery === '\\\\') {\r\n return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);\r\n }\r\n }\r\n return entry;\r\n}\r\nexports.removeLeadingDotSegment = removeLeadingDotSegment;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;\r\nconst path = require(\"path\");\r\nconst globParent = require(\"glob-parent\");\r\nconst micromatch = require(\"micromatch\");\r\nconst picomatch = require(\"picomatch\");\r\nconst GLOBSTAR = '**';\r\nconst ESCAPE_SYMBOL = '\\\\';\r\nconst COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;\r\nconst REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\\[.*]/;\r\nconst REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\\(.*\\|.*\\)/;\r\nconst GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\\(.*\\)/;\r\nconst BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\\.\\.).*}/;\r\nfunction isStaticPattern(pattern, options = {}) {\r\n return !isDynamicPattern(pattern, options);\r\n}\r\nexports.isStaticPattern = isStaticPattern;\r\nfunction isDynamicPattern(pattern, options = {}) {\r\n /**\r\n * A special case with an empty string is necessary for matching patterns that start with a forward slash.\r\n * An empty string cannot be a dynamic pattern.\r\n * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.\r\n */\r\n if (pattern === '') {\r\n return false;\r\n }\r\n /**\r\n * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check\r\n * filepath directly (without read directory).\r\n */\r\n if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {\r\n return true;\r\n }\r\n if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {\r\n return true;\r\n }\r\n return false;\r\n}\r\nexports.isDynamicPattern = isDynamicPattern;\r\nfunction convertToPositivePattern(pattern) {\r\n return isNegativePattern(pattern) ? pattern.slice(1) : pattern;\r\n}\r\nexports.convertToPositivePattern = convertToPositivePattern;\r\nfunction convertToNegativePattern(pattern) {\r\n return '!' 
+ pattern;\r\n}\r\nexports.convertToNegativePattern = convertToNegativePattern;\r\nfunction isNegativePattern(pattern) {\r\n return pattern.startsWith('!') && pattern[1] !== '(';\r\n}\r\nexports.isNegativePattern = isNegativePattern;\r\nfunction isPositivePattern(pattern) {\r\n return !isNegativePattern(pattern);\r\n}\r\nexports.isPositivePattern = isPositivePattern;\r\nfunction getNegativePatterns(patterns) {\r\n return patterns.filter(isNegativePattern);\r\n}\r\nexports.getNegativePatterns = getNegativePatterns;\r\nfunction getPositivePatterns(patterns) {\r\n return patterns.filter(isPositivePattern);\r\n}\r\nexports.getPositivePatterns = getPositivePatterns;\r\nfunction getBaseDirectory(pattern) {\r\n return globParent(pattern, { flipBackslashes: false });\r\n}\r\nexports.getBaseDirectory = getBaseDirectory;\r\nfunction hasGlobStar(pattern) {\r\n return pattern.includes(GLOBSTAR);\r\n}\r\nexports.hasGlobStar = hasGlobStar;\r\nfunction endsWithSlashGlobStar(pattern) {\r\n return pattern.endsWith('/' + GLOBSTAR);\r\n}\r\nexports.endsWithSlashGlobStar = endsWithSlashGlobStar;\r\nfunction isAffectDepthOfReadingPattern(pattern) {\r\n const basename = path.basename(pattern);\r\n return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);\r\n}\r\nexports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;\r\nfunction expandPatternsWithBraceExpansion(patterns) {\r\n return patterns.reduce((collection, pattern) => {\r\n return collection.concat(expandBraceExpansion(pattern));\r\n }, []);\r\n}\r\nexports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;\r\nfunction expandBraceExpansion(pattern) {\r\n return micromatch.braces(pattern, {\r\n expand: true,\r\n nodupes: true\r\n });\r\n}\r\nexports.expandBraceExpansion = expandBraceExpansion;\r\nfunction getPatternParts(pattern, options) {\r\n let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));\r\n /**\r\n * The scan method returns an empty array in some cases.\r\n * See micromatch/picomatch#58 for more details.\r\n */\r\n if (parts.length === 0) {\r\n parts = [pattern];\r\n }\r\n /**\r\n * The scan method does not return an empty part for the pattern with a forward slash.\r\n * This is another part of micromatch/picomatch#58.\r\n */\r\n if (parts[0].startsWith('/')) {\r\n parts[0] = parts[0].slice(1);\r\n parts.unshift('');\r\n }\r\n return parts;\r\n}\r\nexports.getPatternParts = getPatternParts;\r\nfunction makeRe(pattern, options) {\r\n return micromatch.makeRe(pattern, options);\r\n}\r\nexports.makeRe = makeRe;\r\nfunction convertPatternsToRe(patterns, options) {\r\n return patterns.map((pattern) => makeRe(pattern, options));\r\n}\r\nexports.convertPatternsToRe = convertPatternsToRe;\r\nfunction matchAny(entry, patternsRe) {\r\n return patternsRe.some((patternRe) => patternRe.test(entry));\r\n}\r\nexports.matchAny = matchAny;\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.merge = void 0;\r\nconst merge2 = require(\"merge2\");\r\nfunction merge(streams) {\r\n const mergedStream = merge2(streams);\r\n streams.forEach((stream) => {\r\n stream.once('error', (error) => mergedStream.emit('error', error));\r\n });\r\n mergedStream.once('close', () => propagateCloseEventToSources(streams));\r\n mergedStream.once('end', () => propagateCloseEventToSources(streams));\r\n return mergedStream;\r\n}\r\nexports.merge = merge;\r\nfunction propagateCloseEventToSources(streams) {\r\n streams.forEach((stream) => 
stream.emit('close'));\r\n}\r\n","\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nexports.isEmpty = exports.isString = void 0;\r\nfunction isString(input) {\r\n return typeof input === 'string';\r\n}\r\nexports.isString = isString;\r\nfunction isEmpty(input) {\r\n return input === '';\r\n}\r\nexports.isEmpty = isEmpty;\r\n","'use strict'\n\nvar reusify = require('reusify')\n\nfunction fastqueue (context, worker, concurrency) {\n if (typeof context === 'function') {\n concurrency = worker\n worker = context\n context = null\n }\n\n if (concurrency < 1) {\n throw new Error('fastqueue concurrency must be greater than 1')\n }\n\n var cache = reusify(Task)\n var queueHead = null\n var queueTail = null\n var _running = 0\n var errorHandler = null\n\n var self = {\n push: push,\n drain: noop,\n saturated: noop,\n pause: pause,\n paused: false,\n concurrency: concurrency,\n running: running,\n resume: resume,\n idle: idle,\n length: length,\n getQueue: getQueue,\n unshift: unshift,\n empty: noop,\n kill: kill,\n killAndDrain: killAndDrain,\n error: error\n }\n\n return self\n\n function running () {\n return _running\n }\n\n function pause () {\n self.paused = true\n }\n\n function length () {\n var current = queueHead\n var counter = 0\n\n while (current) {\n current = current.next\n counter++\n }\n\n return counter\n }\n\n function getQueue () {\n var current = queueHead\n var tasks = []\n\n while (current) {\n tasks.push(current.value)\n current = current.next\n }\n\n return tasks\n }\n\n function resume () {\n if (!self.paused) return\n self.paused = false\n for (var i = 0; i < self.concurrency; i++) {\n _running++\n release()\n }\n }\n\n function idle () {\n return _running === 0 && self.length() === 0\n }\n\n function push (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n current.errorHandler = errorHandler\n\n if (_running === self.concurrency || self.paused) {\n if (queueTail) {\n queueTail.next = current\n queueTail = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function unshift (value, done) {\n var current = cache.get()\n\n current.context = context\n current.release = release\n current.value = value\n current.callback = done || noop\n\n if (_running === self.concurrency || self.paused) {\n if (queueHead) {\n current.next = queueHead\n queueHead = current\n } else {\n queueHead = current\n queueTail = current\n self.saturated()\n }\n } else {\n _running++\n worker.call(context, current.value, current.worked)\n }\n }\n\n function release (holder) {\n if (holder) {\n cache.release(holder)\n }\n var next = queueHead\n if (next) {\n if (!self.paused) {\n if (queueTail === queueHead) {\n queueTail = null\n }\n queueHead = next.next\n next.next = null\n worker.call(context, next.value, next.worked)\n if (queueTail === null) {\n self.empty()\n }\n } else {\n _running--\n }\n } else if (--_running === 0) {\n self.drain()\n }\n }\n\n function kill () {\n queueHead = null\n queueTail = null\n self.drain = noop\n }\n\n function killAndDrain () {\n queueHead = null\n queueTail = null\n self.drain()\n self.drain = noop\n }\n\n function error (handler) {\n errorHandler = handler\n }\n}\n\nfunction noop () {}\n\nfunction Task () {\n this.value = null\n this.callback = noop\n this.next = null\n this.release = noop\n 
this.context = null\n this.errorHandler = null\n\n var self = this\n\n this.worked = function worked (err, result) {\n var callback = self.callback\n var errorHandler = self.errorHandler\n var val = self.value\n self.value = null\n self.callback = noop\n if (self.errorHandler) {\n errorHandler(err, val)\n }\n callback.call(self.context, err, result)\n self.release(self)\n }\n}\n\nmodule.exports = fastqueue\n","'use strict';\n\nvar isGlob = require('is-glob');\nvar pathPosixDirname = require('path').posix.dirname;\nvar isWin32 = require('os').platform() === 'win32';\n\nvar slash = '/';\nvar backslash = /\\\\/g;\nvar enclosure = /[\\{\\[].*[\\/]*.*[\\}\\]]$/;\nvar globby = /(^|[^\\\\])([\\{\\[]|\\([^\\)]+$)/;\nvar escaped = /\\\\([\\!\\*\\?\\|\\[\\]\\(\\)\\{\\}])/g;\n\n/**\n * @param {string} str\n * @param {Object} opts\n * @param {boolean} [opts.flipBackslashes=true]\n */\nmodule.exports = function globParent(str, opts) {\n var options = Object.assign({ flipBackslashes: true }, opts);\n\n // flip windows path separators\n if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) {\n str = str.replace(backslash, slash);\n }\n\n // special case for strings ending in enclosure containing path separator\n if (enclosure.test(str)) {\n str += slash;\n }\n\n // preserves full path in case of trailing path separator\n str += 'a';\n\n // remove path parts that are globby\n do {\n str = pathPosixDirname(str);\n } while (isGlob(str) || globby.test(str));\n\n // remove escape chars and return result\n return str.replace(escaped, '$1');\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst types_1 = require(\"./types\");\nfunction createRejection(error, ...beforeErrorGroups) {\n const promise = (async () => {\n if (error instanceof types_1.RequestError) {\n try {\n for (const hooks of beforeErrorGroups) {\n if (hooks) {\n for (const hook of hooks) {\n // eslint-disable-next-line no-await-in-loop\n error = await hook(error);\n }\n }\n }\n }\n catch (error_) {\n error = error_;\n }\n }\n throw error;\n })();\n const returnPromise = () => promise;\n promise.json = returnPromise;\n promise.text = returnPromise;\n promise.buffer = returnPromise;\n promise.on = returnPromise;\n return promise;\n}\nexports.default = createRejection;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst events_1 = require(\"events\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst PCancelable = require(\"p-cancelable\");\nconst types_1 = require(\"./types\");\nconst parse_body_1 = require(\"./parse-body\");\nconst core_1 = require(\"../core\");\nconst proxy_events_1 = require(\"../core/utils/proxy-events\");\nconst get_buffer_1 = require(\"../core/utils/get-buffer\");\nconst is_response_ok_1 = require(\"../core/utils/is-response-ok\");\nconst proxiedRequestEvents = [\n 'request',\n 'response',\n 'redirect',\n 'uploadProgress',\n 'downloadProgress'\n];\nfunction asPromise(normalizedOptions) {\n let globalRequest;\n let globalResponse;\n const emitter = new events_1.EventEmitter();\n const promise = new PCancelable((resolve, reject, onCancel) => {\n const makeRequest = (retryCount) => {\n const request = new core_1.default(undefined, normalizedOptions);\n request.retryCount = retryCount;\n request._noPipe = true;\n onCancel(() => request.destroy());\n onCancel.shouldReject = false;\n onCancel(() => reject(new types_1.CancelError(request)));\n globalRequest = request;\n request.once('response', async (response) => {\n var _a;\n response.retryCount = retryCount;\n if (response.request.aborted) {\n // Canceled while downloading - will throw a `CancelError` or `TimeoutError` error\n return;\n }\n // Download body\n let rawBody;\n try {\n rawBody = await get_buffer_1.default(request);\n response.rawBody = rawBody;\n }\n catch (_b) {\n // The same error is caught below.\n // See request.once('error')\n return;\n }\n if (request._isAboutToError) {\n return;\n }\n // Parse body\n const contentEncoding = ((_a = response.headers['content-encoding']) !== null && _a !== void 0 ? _a : '').toLowerCase();\n const isCompressed = ['gzip', 'deflate', 'br'].includes(contentEncoding);\n const { options } = request;\n if (isCompressed && !options.decompress) {\n response.body = rawBody;\n }\n else {\n try {\n response.body = parse_body_1.default(response, options.responseType, options.parseJson, options.encoding);\n }\n catch (error) {\n // Fallback to `utf8`\n response.body = rawBody.toString();\n if (is_response_ok_1.isResponseOk(response)) {\n request._beforeError(error);\n return;\n }\n }\n }\n try {\n for (const [index, hook] of options.hooks.afterResponse.entries()) {\n // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise\n // eslint-disable-next-line no-await-in-loop\n response = await hook(response, async (updatedOptions) => {\n const typedOptions = core_1.default.normalizeArguments(undefined, {\n ...updatedOptions,\n retry: {\n calculateDelay: () => 0\n },\n throwHttpErrors: false,\n resolveBodyOnly: false\n }, options);\n // Remove any further hooks for that request, because we'll call them anyway.\n // The loop continues. 
We don't want duplicates (asPromise recursion).\n typedOptions.hooks.afterResponse = typedOptions.hooks.afterResponse.slice(0, index);\n for (const hook of typedOptions.hooks.beforeRetry) {\n // eslint-disable-next-line no-await-in-loop\n await hook(typedOptions);\n }\n const promise = asPromise(typedOptions);\n onCancel(() => {\n promise.catch(() => { });\n promise.cancel();\n });\n return promise;\n });\n }\n }\n catch (error) {\n request._beforeError(new types_1.RequestError(error.message, error, request));\n return;\n }\n if (!is_response_ok_1.isResponseOk(response)) {\n request._beforeError(new types_1.HTTPError(response));\n return;\n }\n globalResponse = response;\n resolve(request.options.resolveBodyOnly ? response.body : response);\n });\n const onError = (error) => {\n if (promise.isCanceled) {\n return;\n }\n const { options } = request;\n if (error instanceof types_1.HTTPError && !options.throwHttpErrors) {\n const { response } = error;\n resolve(request.options.resolveBodyOnly ? response.body : response);\n return;\n }\n reject(error);\n };\n request.once('error', onError);\n const previousBody = request.options.body;\n request.once('retry', (newRetryCount, error) => {\n var _a, _b;\n if (previousBody === ((_a = error.request) === null || _a === void 0 ? void 0 : _a.options.body) && is_1.default.nodeStream((_b = error.request) === null || _b === void 0 ? void 0 : _b.options.body)) {\n onError(error);\n return;\n }\n makeRequest(newRetryCount);\n });\n proxy_events_1.default(request, emitter, proxiedRequestEvents);\n };\n makeRequest(0);\n });\n promise.on = (event, fn) => {\n emitter.on(event, fn);\n return promise;\n };\n const shortcut = (responseType) => {\n const newPromise = (async () => {\n // Wait until downloading has ended\n await promise;\n const { options } = globalResponse.request;\n return parse_body_1.default(globalResponse, responseType, options.parseJson, options.encoding);\n })();\n Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise));\n return newPromise;\n };\n promise.json = () => {\n const { headers } = globalRequest.options;\n if (!globalRequest.writableFinished && headers.accept === undefined) {\n headers.accept = 'application/json';\n }\n return shortcut('json');\n };\n promise.buffer = () => shortcut('buffer');\n promise.text = () => shortcut('text');\n return promise;\n}\nexports.default = asPromise;\n__exportStar(require(\"./types\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nconst normalizeArguments = (options, defaults) => {\n if (is_1.default.null_(options.encoding)) {\n throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead');\n }\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.encoding);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.resolveBodyOnly);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.methodRewriting);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.isStream);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.responseType);\n // `options.responseType`\n if (options.responseType === undefined) {\n options.responseType = 'text';\n }\n // `options.retry`\n const { retry } = options;\n if (defaults) {\n options.retry = { ...defaults.retry };\n }\n else {\n options.retry = {\n calculateDelay: retryObject => retryObject.computedValue,\n limit: 0,\n methods: 
[],\n statusCodes: [],\n errorCodes: [],\n maxRetryAfter: undefined\n };\n }\n if (is_1.default.object(retry)) {\n options.retry = {\n ...options.retry,\n ...retry\n };\n options.retry.methods = [...new Set(options.retry.methods.map(method => method.toUpperCase()))];\n options.retry.statusCodes = [...new Set(options.retry.statusCodes)];\n options.retry.errorCodes = [...new Set(options.retry.errorCodes)];\n }\n else if (is_1.default.number(retry)) {\n options.retry.limit = retry;\n }\n if (is_1.default.undefined(options.retry.maxRetryAfter)) {\n options.retry.maxRetryAfter = Math.min(\n // TypeScript is not smart enough to handle `.filter(x => is.number(x))`.\n // eslint-disable-next-line unicorn/no-fn-reference-in-iterator\n ...[options.timeout.request, options.timeout.connect].filter(is_1.default.number));\n }\n // `options.pagination`\n if (is_1.default.object(options.pagination)) {\n if (defaults) {\n options.pagination = {\n ...defaults.pagination,\n ...options.pagination\n };\n }\n const { pagination } = options;\n if (!is_1.default.function_(pagination.transform)) {\n throw new Error('`options.pagination.transform` must be implemented');\n }\n if (!is_1.default.function_(pagination.shouldContinue)) {\n throw new Error('`options.pagination.shouldContinue` must be implemented');\n }\n if (!is_1.default.function_(pagination.filter)) {\n throw new TypeError('`options.pagination.filter` must be implemented');\n }\n if (!is_1.default.function_(pagination.paginate)) {\n throw new Error('`options.pagination.paginate` must be implemented');\n }\n }\n // JSON mode\n if (options.responseType === 'json' && options.headers.accept === undefined) {\n options.headers.accept = 'application/json';\n }\n return options;\n};\nexports.default = normalizeArguments;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst types_1 = require(\"./types\");\nconst parseBody = (response, responseType, parseJson, encoding) => {\n const { rawBody } = response;\n try {\n if (responseType === 'text') {\n return rawBody.toString(encoding);\n }\n if (responseType === 'json') {\n return rawBody.length === 0 ? '' : parseJson(rawBody.toString());\n }\n if (responseType === 'buffer') {\n return rawBody;\n }\n throw new types_1.ParseError({\n message: `Unknown body type '${responseType}'`,\n name: 'Error'\n }, response);\n }\n catch (error) {\n throw new types_1.ParseError(error, response);\n }\n};\nexports.default = parseBody;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CancelError = exports.ParseError = void 0;\nconst core_1 = require(\"../core\");\n/**\nAn error to be thrown when server response code is 2xx, and parsing body fails.\nIncludes a `response` property.\n*/\nclass ParseError extends core_1.RequestError {\n constructor(error, response) {\n const { options } = response.request;\n super(`${error.message} in \"${options.url.toString()}\"`, error, response.request);\n this.name = 'ParseError';\n }\n}\nexports.ParseError = ParseError;\n/**\nAn error to be thrown when the request is aborted with `.cancel()`.\n*/\nclass CancelError extends core_1.RequestError {\n constructor(request) {\n super('Promise was canceled', {}, request);\n this.name = 'CancelError';\n }\n get isCanceled() {\n return true;\n }\n}\nexports.CancelError = CancelError;\n__exportStar(require(\"../core\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.retryAfterStatusCodes = void 0;\nexports.retryAfterStatusCodes = new Set([413, 429, 503]);\nconst calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter }) => {\n if (attemptCount > retryOptions.limit) {\n return 0;\n }\n const hasMethod = retryOptions.methods.includes(error.options.method);\n const hasErrorCode = retryOptions.errorCodes.includes(error.code);\n const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode);\n if (!hasMethod || (!hasErrorCode && !hasStatusCode)) {\n return 0;\n }\n if (error.response) {\n if (retryAfter) {\n if (retryOptions.maxRetryAfter === undefined || retryAfter > retryOptions.maxRetryAfter) {\n return 0;\n }\n return retryAfter;\n }\n if (error.response.statusCode === 413) {\n return 0;\n }\n }\n const noise = Math.random() * 100;\n return ((2 ** (attemptCount - 1)) * 1000) + noise;\n};\nexports.default = calculateRetryDelay;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.UnsupportedProtocolError = exports.ReadError = exports.TimeoutError = exports.UploadError = exports.CacheError = exports.HTTPError = exports.MaxRedirectsError = exports.RequestError = exports.setNonEnumerableProperties = exports.knownHookEvents = exports.withoutBody = exports.kIsNormalizedAlready = void 0;\nconst util_1 = require(\"util\");\nconst stream_1 = require(\"stream\");\nconst fs_1 = require(\"fs\");\nconst url_1 = require(\"url\");\nconst http = require(\"http\");\nconst http_1 = require(\"http\");\nconst https = require(\"https\");\nconst http_timer_1 = require(\"@szmarczak/http-timer\");\nconst cacheable_lookup_1 = require(\"cacheable-lookup\");\nconst CacheableRequest = require(\"cacheable-request\");\nconst decompressResponse = require(\"decompress-response\");\n// @ts-expect-error Missing types\nconst http2wrapper = require(\"http2-wrapper\");\nconst lowercaseKeys = require(\"lowercase-keys\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst get_body_size_1 = require(\"./utils/get-body-size\");\nconst is_form_data_1 = 
require(\"./utils/is-form-data\");\nconst proxy_events_1 = require(\"./utils/proxy-events\");\nconst timed_out_1 = require(\"./utils/timed-out\");\nconst url_to_options_1 = require(\"./utils/url-to-options\");\nconst options_to_url_1 = require(\"./utils/options-to-url\");\nconst weakable_map_1 = require(\"./utils/weakable-map\");\nconst get_buffer_1 = require(\"./utils/get-buffer\");\nconst dns_ip_version_1 = require(\"./utils/dns-ip-version\");\nconst is_response_ok_1 = require(\"./utils/is-response-ok\");\nconst deprecation_warning_1 = require(\"../utils/deprecation-warning\");\nconst normalize_arguments_1 = require(\"../as-promise/normalize-arguments\");\nconst calculate_retry_delay_1 = require(\"./calculate-retry-delay\");\nlet globalDnsCache;\nconst kRequest = Symbol('request');\nconst kResponse = Symbol('response');\nconst kResponseSize = Symbol('responseSize');\nconst kDownloadedSize = Symbol('downloadedSize');\nconst kBodySize = Symbol('bodySize');\nconst kUploadedSize = Symbol('uploadedSize');\nconst kServerResponsesPiped = Symbol('serverResponsesPiped');\nconst kUnproxyEvents = Symbol('unproxyEvents');\nconst kIsFromCache = Symbol('isFromCache');\nconst kCancelTimeouts = Symbol('cancelTimeouts');\nconst kStartedReading = Symbol('startedReading');\nconst kStopReading = Symbol('stopReading');\nconst kTriggerRead = Symbol('triggerRead');\nconst kBody = Symbol('body');\nconst kJobs = Symbol('jobs');\nconst kOriginalResponse = Symbol('originalResponse');\nconst kRetryTimeout = Symbol('retryTimeout');\nexports.kIsNormalizedAlready = Symbol('isNormalizedAlready');\nconst supportsBrotli = is_1.default.string(process.versions.brotli);\nexports.withoutBody = new Set(['GET', 'HEAD']);\nexports.knownHookEvents = [\n 'init',\n 'beforeRequest',\n 'beforeRedirect',\n 'beforeError',\n 'beforeRetry',\n // Promise-Only\n 'afterResponse'\n];\nfunction validateSearchParameters(searchParameters) {\n // eslint-disable-next-line guard-for-in\n for (const key in searchParameters) {\n const value = searchParameters[key];\n if (!is_1.default.string(value) && !is_1.default.number(value) && !is_1.default.boolean(value) && !is_1.default.null_(value) && !is_1.default.undefined(value)) {\n throw new TypeError(`The \\`searchParams\\` value '${String(value)}' must be a string, number, boolean or null`);\n }\n }\n}\nfunction isClientRequest(clientRequest) {\n return is_1.default.object(clientRequest) && !('statusCode' in clientRequest);\n}\nconst cacheableStore = new weakable_map_1.default();\nconst waitForOpenFile = async (file) => new Promise((resolve, reject) => {\n const onError = (error) => {\n reject(error);\n };\n // Node.js 12 has incomplete types\n if (!file.pending) {\n resolve();\n }\n file.once('error', onError);\n file.once('ready', () => {\n file.off('error', onError);\n resolve();\n });\n});\nconst redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]);\nconst nonEnumerableProperties = [\n 'context',\n 'body',\n 'json',\n 'form'\n];\nexports.setNonEnumerableProperties = (sources, to) => {\n // Non enumerable properties shall not be merged\n const properties = {};\n for (const source of sources) {\n if (!source) {\n continue;\n }\n for (const name of nonEnumerableProperties) {\n if (!(name in source)) {\n continue;\n }\n properties[name] = {\n writable: true,\n configurable: true,\n enumerable: false,\n // @ts-expect-error TS doesn't see the check above\n value: source[name]\n };\n }\n }\n Object.defineProperties(to, properties);\n};\n/**\nAn error to be thrown when a request fails.\nContains 
a `code` property with error class code, like `ECONNREFUSED`.\n*/\nclass RequestError extends Error {\n constructor(message, error, self) {\n var _a;\n super(message);\n Error.captureStackTrace(this, this.constructor);\n this.name = 'RequestError';\n this.code = error.code;\n if (self instanceof Request) {\n Object.defineProperty(this, 'request', {\n enumerable: false,\n value: self\n });\n Object.defineProperty(this, 'response', {\n enumerable: false,\n value: self[kResponse]\n });\n Object.defineProperty(this, 'options', {\n // This fails because of TS 3.7.2 useDefineForClassFields\n // Ref: https://github.com/microsoft/TypeScript/issues/34972\n enumerable: false,\n value: self.options\n });\n }\n else {\n Object.defineProperty(this, 'options', {\n // This fails because of TS 3.7.2 useDefineForClassFields\n // Ref: https://github.com/microsoft/TypeScript/issues/34972\n enumerable: false,\n value: self\n });\n }\n this.timings = (_a = this.request) === null || _a === void 0 ? void 0 : _a.timings;\n // Recover the original stacktrace\n if (is_1.default.string(error.stack) && is_1.default.string(this.stack)) {\n const indexOfMessage = this.stack.indexOf(this.message) + this.message.length;\n const thisStackTrace = this.stack.slice(indexOfMessage).split('\\n').reverse();\n const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\\n').reverse();\n // Remove duplicated traces\n while (errorStackTrace.length !== 0 && errorStackTrace[0] === thisStackTrace[0]) {\n thisStackTrace.shift();\n }\n this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\\n')}${errorStackTrace.reverse().join('\\n')}`;\n }\n }\n}\nexports.RequestError = RequestError;\n/**\nAn error to be thrown when the server redirects you more than ten times.\nIncludes a `response` property.\n*/\nclass MaxRedirectsError extends RequestError {\n constructor(request) {\n super(`Redirected ${request.options.maxRedirects} times. 
Aborting.`, {}, request);\n this.name = 'MaxRedirectsError';\n }\n}\nexports.MaxRedirectsError = MaxRedirectsError;\n/**\nAn error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304.\nIncludes a `response` property.\n*/\nclass HTTPError extends RequestError {\n constructor(response) {\n super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request);\n this.name = 'HTTPError';\n }\n}\nexports.HTTPError = HTTPError;\n/**\nAn error to be thrown when a cache method fails.\nFor example, if the database goes down or there's a filesystem error.\n*/\nclass CacheError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'CacheError';\n }\n}\nexports.CacheError = CacheError;\n/**\nAn error to be thrown when the request body is a stream and an error occurs while reading from that stream.\n*/\nclass UploadError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'UploadError';\n }\n}\nexports.UploadError = UploadError;\n/**\nAn error to be thrown when the request is aborted due to a timeout.\nIncludes an `event` and `timings` property.\n*/\nclass TimeoutError extends RequestError {\n constructor(error, timings, request) {\n super(error.message, error, request);\n this.name = 'TimeoutError';\n this.event = error.event;\n this.timings = timings;\n }\n}\nexports.TimeoutError = TimeoutError;\n/**\nAn error to be thrown when reading from response stream fails.\n*/\nclass ReadError extends RequestError {\n constructor(error, request) {\n super(error.message, error, request);\n this.name = 'ReadError';\n }\n}\nexports.ReadError = ReadError;\n/**\nAn error to be thrown when given an unsupported protocol.\n*/\nclass UnsupportedProtocolError extends RequestError {\n constructor(options) {\n super(`Unsupported protocol \"${options.url.protocol}\"`, {}, options);\n this.name = 'UnsupportedProtocolError';\n }\n}\nexports.UnsupportedProtocolError = UnsupportedProtocolError;\nconst proxiedRequestEvents = [\n 'socket',\n 'connect',\n 'continue',\n 'information',\n 'upgrade',\n 'timeout'\n];\nclass Request extends stream_1.Duplex {\n constructor(url, options = {}, defaults) {\n super({\n // This must be false, to enable throwing after destroy\n // It is used for retry logic in Promise API\n autoDestroy: false,\n // It needs to be zero because we're just proxying the data to another stream\n highWaterMark: 0\n });\n this[kDownloadedSize] = 0;\n this[kUploadedSize] = 0;\n this.requestInitialized = false;\n this[kServerResponsesPiped] = new Set();\n this.redirects = [];\n this[kStopReading] = false;\n this[kTriggerRead] = false;\n this[kJobs] = [];\n this.retryCount = 0;\n // TODO: Remove this when targeting Node.js >= 12\n this._progressCallbacks = [];\n const unlockWrite = () => this._unlockWrite();\n const lockWrite = () => this._lockWrite();\n this.on('pipe', (source) => {\n source.prependListener('data', unlockWrite);\n source.on('data', lockWrite);\n source.prependListener('end', unlockWrite);\n source.on('end', lockWrite);\n });\n this.on('unpipe', (source) => {\n source.off('data', unlockWrite);\n source.off('data', lockWrite);\n source.off('end', unlockWrite);\n source.off('end', lockWrite);\n });\n this.on('pipe', source => {\n if (source instanceof http_1.IncomingMessage) {\n this.options.headers = {\n ...source.headers,\n ...this.options.headers\n };\n }\n });\n const { json, body, form } = 
options;\n if (json || body || form) {\n this._lockWrite();\n }\n if (exports.kIsNormalizedAlready in options) {\n this.options = options;\n }\n else {\n try {\n // @ts-expect-error Common TypeScript bug saying that `this.constructor` is not accessible\n this.options = this.constructor.normalizeArguments(url, options, defaults);\n }\n catch (error) {\n // TODO: Move this to `_destroy()`\n if (is_1.default.nodeStream(options.body)) {\n options.body.destroy();\n }\n this.destroy(error);\n return;\n }\n }\n (async () => {\n var _a;\n try {\n if (this.options.body instanceof fs_1.ReadStream) {\n await waitForOpenFile(this.options.body);\n }\n const { url: normalizedURL } = this.options;\n if (!normalizedURL) {\n throw new TypeError('Missing `url` property');\n }\n this.requestUrl = normalizedURL.toString();\n decodeURI(this.requestUrl);\n await this._finalizeBody();\n await this._makeRequest();\n if (this.destroyed) {\n (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroy();\n return;\n }\n // Queued writes etc.\n for (const job of this[kJobs]) {\n job();\n }\n // Prevent memory leak\n this[kJobs].length = 0;\n this.requestInitialized = true;\n }\n catch (error) {\n if (error instanceof RequestError) {\n this._beforeError(error);\n return;\n }\n // This is a workaround for https://github.com/nodejs/node/issues/33335\n if (!this.destroyed) {\n this.destroy(error);\n }\n }\n })();\n }\n static normalizeArguments(url, options, defaults) {\n var _a, _b, _c, _d, _e;\n const rawOptions = options;\n if (is_1.default.object(url) && !is_1.default.urlInstance(url)) {\n options = { ...defaults, ...url, ...options };\n }\n else {\n if (url && options && options.url !== undefined) {\n throw new TypeError('The `url` option is mutually exclusive with the `input` argument');\n }\n options = { ...defaults, ...options };\n if (url !== undefined) {\n options.url = url;\n }\n if (is_1.default.urlInstance(options.url)) {\n options.url = new url_1.URL(options.url.toString());\n }\n }\n // TODO: Deprecate URL options in Got 12.\n // Support extend-specific options\n if (options.cache === false) {\n options.cache = undefined;\n }\n if (options.dnsCache === false) {\n options.dnsCache = undefined;\n }\n // Nice type assertions\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.method);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.headers);\n is_1.assert.any([is_1.default.string, is_1.default.urlInstance, is_1.default.undefined], options.prefixUrl);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cookieJar);\n is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.searchParams);\n is_1.assert.any([is_1.default.object, is_1.default.string, is_1.default.undefined], options.cache);\n is_1.assert.any([is_1.default.object, is_1.default.number, is_1.default.undefined], options.timeout);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.context);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.hooks);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.decompress);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.ignoreInvalidCookies);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.followRedirect);\n is_1.assert.any([is_1.default.number, is_1.default.undefined], options.maxRedirects);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.throwHttpErrors);\n 
is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.http2);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.allowGetBody);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.localAddress);\n is_1.assert.any([dns_ip_version_1.isDnsLookupIpVersion, is_1.default.undefined], options.dnsLookupIpVersion);\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.https);\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.rejectUnauthorized);\n if (options.https) {\n is_1.assert.any([is_1.default.boolean, is_1.default.undefined], options.https.rejectUnauthorized);\n is_1.assert.any([is_1.default.function_, is_1.default.undefined], options.https.checkServerIdentity);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificateAuthority);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.key);\n is_1.assert.any([is_1.default.string, is_1.default.object, is_1.default.array, is_1.default.undefined], options.https.certificate);\n is_1.assert.any([is_1.default.string, is_1.default.undefined], options.https.passphrase);\n is_1.assert.any([is_1.default.string, is_1.default.buffer, is_1.default.array, is_1.default.undefined], options.https.pfx);\n }\n is_1.assert.any([is_1.default.object, is_1.default.undefined], options.cacheOptions);\n // `options.method`\n if (is_1.default.string(options.method)) {\n options.method = options.method.toUpperCase();\n }\n else {\n options.method = 'GET';\n }\n // `options.headers`\n if (options.headers === (defaults === null || defaults === void 0 ? void 0 : defaults.headers)) {\n options.headers = { ...options.headers };\n }\n else {\n options.headers = lowercaseKeys({ ...(defaults === null || defaults === void 0 ? void 0 : defaults.headers), ...options.headers });\n }\n // Disallow legacy `url.Url`\n if ('slashes' in options) {\n throw new TypeError('The legacy `url.Url` has been deprecated. Use `URL` instead.');\n }\n // `options.auth`\n if ('auth' in options) {\n throw new TypeError('Parameter `auth` is deprecated. Use `username` / `password` instead.');\n }\n // `options.searchParams`\n if ('searchParams' in options) {\n if (options.searchParams && options.searchParams !== (defaults === null || defaults === void 0 ? void 0 : defaults.searchParams)) {\n let searchParameters;\n if (is_1.default.string(options.searchParams) || (options.searchParams instanceof url_1.URLSearchParams)) {\n searchParameters = new url_1.URLSearchParams(options.searchParams);\n }\n else {\n validateSearchParameters(options.searchParams);\n searchParameters = new url_1.URLSearchParams();\n // eslint-disable-next-line guard-for-in\n for (const key in options.searchParams) {\n const value = options.searchParams[key];\n if (value === null) {\n searchParameters.append(key, '');\n }\n else if (value !== undefined) {\n searchParameters.append(key, value);\n }\n }\n }\n // `normalizeArguments()` is also used to merge options\n (_a = defaults === null || defaults === void 0 ? void 0 : defaults.searchParams) === null || _a === void 0 ? 
void 0 : _a.forEach((value, key) => {\n // Only use default if one isn't already defined\n if (!searchParameters.has(key)) {\n searchParameters.append(key, value);\n }\n });\n options.searchParams = searchParameters;\n }\n }\n // `options.username` & `options.password`\n options.username = (_b = options.username) !== null && _b !== void 0 ? _b : '';\n options.password = (_c = options.password) !== null && _c !== void 0 ? _c : '';\n // `options.prefixUrl` & `options.url`\n if (is_1.default.undefined(options.prefixUrl)) {\n options.prefixUrl = (_d = defaults === null || defaults === void 0 ? void 0 : defaults.prefixUrl) !== null && _d !== void 0 ? _d : '';\n }\n else {\n options.prefixUrl = options.prefixUrl.toString();\n if (options.prefixUrl !== '' && !options.prefixUrl.endsWith('/')) {\n options.prefixUrl += '/';\n }\n }\n if (is_1.default.string(options.url)) {\n if (options.url.startsWith('/')) {\n throw new Error('`input` must not start with a slash when using `prefixUrl`');\n }\n options.url = options_to_url_1.default(options.prefixUrl + options.url, options);\n }\n else if ((is_1.default.undefined(options.url) && options.prefixUrl !== '') || options.protocol) {\n options.url = options_to_url_1.default(options.prefixUrl, options);\n }\n if (options.url) {\n if ('port' in options) {\n delete options.port;\n }\n // Make it possible to change `options.prefixUrl`\n let { prefixUrl } = options;\n Object.defineProperty(options, 'prefixUrl', {\n set: (value) => {\n const url = options.url;\n if (!url.href.startsWith(value)) {\n throw new Error(`Cannot change \\`prefixUrl\\` from ${prefixUrl} to ${value}: ${url.href}`);\n }\n options.url = new url_1.URL(value + url.href.slice(prefixUrl.length));\n prefixUrl = value;\n },\n get: () => prefixUrl\n });\n // Support UNIX sockets\n let { protocol } = options.url;\n if (protocol === 'unix:') {\n protocol = 'http:';\n options.url = new url_1.URL(`http://unix${options.url.pathname}${options.url.search}`);\n }\n // Set search params\n if (options.searchParams) {\n // eslint-disable-next-line @typescript-eslint/no-base-to-string\n options.url.search = options.searchParams.toString();\n }\n // Protocol check\n if (protocol !== 'http:' && protocol !== 'https:') {\n throw new UnsupportedProtocolError(options);\n }\n // Update `username`\n if (options.username === '') {\n options.username = options.url.username;\n }\n else {\n options.url.username = options.username;\n }\n // Update `password`\n if (options.password === '') {\n options.password = options.url.password;\n }\n else {\n options.url.password = options.password;\n }\n }\n // `options.cookieJar`\n const { cookieJar } = options;\n if (cookieJar) {\n let { setCookie, getCookieString } = cookieJar;\n is_1.assert.function_(setCookie);\n is_1.assert.function_(getCookieString);\n /* istanbul ignore next: Horrible `tough-cookie` v3 check */\n if (setCookie.length === 4 && getCookieString.length === 0) {\n setCookie = util_1.promisify(setCookie.bind(options.cookieJar));\n getCookieString = util_1.promisify(getCookieString.bind(options.cookieJar));\n options.cookieJar = {\n setCookie,\n getCookieString: getCookieString\n };\n }\n }\n // `options.cache`\n const { cache } = options;\n if (cache) {\n if (!cacheableStore.has(cache)) {\n cacheableStore.set(cache, new CacheableRequest(((requestOptions, handler) => {\n const result = requestOptions[kRequest](requestOptions, handler);\n // TODO: remove this when `cacheable-request` supports async request functions.\n if (is_1.default.promise(result)) {\n // 
@ts-expect-error\n // We only need to implement the error handler in order to support HTTP2 caching.\n // The result will be a promise anyway.\n result.once = (event, handler) => {\n if (event === 'error') {\n result.catch(handler);\n }\n else if (event === 'abort') {\n // The empty catch is needed here in case when\n // it rejects before it's `await`ed in `_makeRequest`.\n (async () => {\n try {\n const request = (await result);\n request.once('abort', handler);\n }\n catch (_a) { }\n })();\n }\n else {\n /* istanbul ignore next: safety check */\n throw new Error(`Unknown HTTP2 promise event: ${event}`);\n }\n return result;\n };\n }\n return result;\n }), cache));\n }\n }\n // `options.cacheOptions`\n options.cacheOptions = { ...options.cacheOptions };\n // `options.dnsCache`\n if (options.dnsCache === true) {\n if (!globalDnsCache) {\n globalDnsCache = new cacheable_lookup_1.default();\n }\n options.dnsCache = globalDnsCache;\n }\n else if (!is_1.default.undefined(options.dnsCache) && !options.dnsCache.lookup) {\n throw new TypeError(`Parameter \\`dnsCache\\` must be a CacheableLookup instance or a boolean, got ${is_1.default(options.dnsCache)}`);\n }\n // `options.timeout`\n if (is_1.default.number(options.timeout)) {\n options.timeout = { request: options.timeout };\n }\n else if (defaults && options.timeout !== defaults.timeout) {\n options.timeout = {\n ...defaults.timeout,\n ...options.timeout\n };\n }\n else {\n options.timeout = { ...options.timeout };\n }\n // `options.context`\n if (!options.context) {\n options.context = {};\n }\n // `options.hooks`\n const areHooksDefault = options.hooks === (defaults === null || defaults === void 0 ? void 0 : defaults.hooks);\n options.hooks = { ...options.hooks };\n for (const event of exports.knownHookEvents) {\n if (event in options.hooks) {\n if (is_1.default.array(options.hooks[event])) {\n // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044\n options.hooks[event] = [...options.hooks[event]];\n }\n else {\n throw new TypeError(`Parameter \\`${event}\\` must be an Array, got ${is_1.default(options.hooks[event])}`);\n }\n }\n else {\n options.hooks[event] = [];\n }\n }\n if (defaults && !areHooksDefault) {\n for (const event of exports.knownHookEvents) {\n const defaultHooks = defaults.hooks[event];\n if (defaultHooks.length > 0) {\n // See https://github.com/microsoft/TypeScript/issues/31445#issuecomment-576929044\n options.hooks[event] = [\n ...defaults.hooks[event],\n ...options.hooks[event]\n ];\n }\n }\n }\n // DNS options\n if ('family' in options) {\n deprecation_warning_1.default('\"options.family\" was never documented, please use \"options.dnsLookupIpVersion\"');\n }\n // HTTPS options\n if (defaults === null || defaults === void 0 ? 
void 0 : defaults.https) {\n options.https = { ...defaults.https, ...options.https };\n }\n if ('rejectUnauthorized' in options) {\n deprecation_warning_1.default('\"options.rejectUnauthorized\" is now deprecated, please use \"options.https.rejectUnauthorized\"');\n }\n if ('checkServerIdentity' in options) {\n deprecation_warning_1.default('\"options.checkServerIdentity\" was never documented, please use \"options.https.checkServerIdentity\"');\n }\n if ('ca' in options) {\n deprecation_warning_1.default('\"options.ca\" was never documented, please use \"options.https.certificateAuthority\"');\n }\n if ('key' in options) {\n deprecation_warning_1.default('\"options.key\" was never documented, please use \"options.https.key\"');\n }\n if ('cert' in options) {\n deprecation_warning_1.default('\"options.cert\" was never documented, please use \"options.https.certificate\"');\n }\n if ('passphrase' in options) {\n deprecation_warning_1.default('\"options.passphrase\" was never documented, please use \"options.https.passphrase\"');\n }\n if ('pfx' in options) {\n deprecation_warning_1.default('\"options.pfx\" was never documented, please use \"options.https.pfx\"');\n }\n // Other options\n if ('followRedirects' in options) {\n throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');\n }\n if (options.agent) {\n for (const key in options.agent) {\n if (key !== 'http' && key !== 'https' && key !== 'http2') {\n throw new TypeError(`Expected the \\`options.agent\\` properties to be \\`http\\`, \\`https\\` or \\`http2\\`, got \\`${key}\\``);\n }\n }\n }\n options.maxRedirects = (_e = options.maxRedirects) !== null && _e !== void 0 ? _e : 0;\n // Set non-enumerable properties\n exports.setNonEnumerableProperties([defaults, rawOptions], options);\n return normalize_arguments_1.default(options, defaults);\n }\n _lockWrite() {\n const onLockedWrite = () => {\n throw new TypeError('The payload has been already provided');\n };\n this.write = onLockedWrite;\n this.end = onLockedWrite;\n }\n _unlockWrite() {\n this.write = super.write;\n this.end = super.end;\n }\n async _finalizeBody() {\n const { options } = this;\n const { headers } = options;\n const isForm = !is_1.default.undefined(options.form);\n const isJSON = !is_1.default.undefined(options.json);\n const isBody = !is_1.default.undefined(options.body);\n const hasPayload = isForm || isJSON || isBody;\n const cannotHaveBody = exports.withoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody);\n this._cannotHaveBody = cannotHaveBody;\n if (hasPayload) {\n if (cannotHaveBody) {\n throw new TypeError(`The \\`${options.method}\\` method cannot be used with a body`);\n }\n if ([isBody, isForm, isJSON].filter(isTrue => isTrue).length > 1) {\n throw new TypeError('The `body`, `json` and `form` options are mutually exclusive');\n }\n if (isBody &&\n !(options.body instanceof stream_1.Readable) &&\n !is_1.default.string(options.body) &&\n !is_1.default.buffer(options.body) &&\n !is_form_data_1.default(options.body)) {\n throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');\n }\n if (isForm && !is_1.default.object(options.form)) {\n throw new TypeError('The `form` option must be an Object');\n }\n {\n // Serialize body\n const noContentType = !is_1.default.string(headers['content-type']);\n if (isBody) {\n // Special case for https://github.com/form-data/form-data\n if (is_form_data_1.default(options.body) && noContentType) {\n headers['content-type'] = 
`multipart/form-data; boundary=${options.body.getBoundary()}`;\n }\n this[kBody] = options.body;\n }\n else if (isForm) {\n if (noContentType) {\n headers['content-type'] = 'application/x-www-form-urlencoded';\n }\n this[kBody] = (new url_1.URLSearchParams(options.form)).toString();\n }\n else {\n if (noContentType) {\n headers['content-type'] = 'application/json';\n }\n this[kBody] = options.stringifyJson(options.json);\n }\n const uploadBodySize = await get_body_size_1.default(this[kBody], options.headers);\n // See https://tools.ietf.org/html/rfc7230#section-3.3.2\n // A user agent SHOULD send a Content-Length in a request message when\n // no Transfer-Encoding is sent and the request method defines a meaning\n // for an enclosed payload body. For example, a Content-Length header\n // field is normally sent in a POST request even when the value is 0\n // (indicating an empty payload body). A user agent SHOULD NOT send a\n // Content-Length header field when the request message does not contain\n // a payload body and the method semantics do not anticipate such a\n // body.\n if (is_1.default.undefined(headers['content-length']) && is_1.default.undefined(headers['transfer-encoding'])) {\n if (!cannotHaveBody && !is_1.default.undefined(uploadBodySize)) {\n headers['content-length'] = String(uploadBodySize);\n }\n }\n }\n }\n else if (cannotHaveBody) {\n this._lockWrite();\n }\n else {\n this._unlockWrite();\n }\n this[kBodySize] = Number(headers['content-length']) || undefined;\n }\n async _onResponseBase(response) {\n const { options } = this;\n const { url } = options;\n this[kOriginalResponse] = response;\n if (options.decompress) {\n response = decompressResponse(response);\n }\n const statusCode = response.statusCode;\n const typedResponse = response;\n typedResponse.statusMessage = typedResponse.statusMessage ? 
typedResponse.statusMessage : http.STATUS_CODES[statusCode];\n typedResponse.url = options.url.toString();\n typedResponse.requestUrl = this.requestUrl;\n typedResponse.redirectUrls = this.redirects;\n typedResponse.request = this;\n typedResponse.isFromCache = response.fromCache || false;\n typedResponse.ip = this.ip;\n typedResponse.retryCount = this.retryCount;\n this[kIsFromCache] = typedResponse.isFromCache;\n this[kResponseSize] = Number(response.headers['content-length']) || undefined;\n this[kResponse] = response;\n response.once('end', () => {\n this[kResponseSize] = this[kDownloadedSize];\n this.emit('downloadProgress', this.downloadProgress);\n });\n response.once('error', (error) => {\n // Force clean-up, because some packages don't do this.\n // TODO: Fix decompress-response\n response.destroy();\n this._beforeError(new ReadError(error, this));\n });\n response.once('aborted', () => {\n this._beforeError(new ReadError({\n name: 'Error',\n message: 'The server aborted pending request',\n code: 'ECONNRESET'\n }, this));\n });\n this.emit('downloadProgress', this.downloadProgress);\n const rawCookies = response.headers['set-cookie'];\n if (is_1.default.object(options.cookieJar) && rawCookies) {\n let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString()));\n if (options.ignoreInvalidCookies) {\n promises = promises.map(async (p) => p.catch(() => { }));\n }\n try {\n await Promise.all(promises);\n }\n catch (error) {\n this._beforeError(error);\n return;\n }\n }\n if (options.followRedirect && response.headers.location && redirectCodes.has(statusCode)) {\n // We're being redirected, we don't care about the response.\n // It'd be best to abort the request, but we can't because\n // we would have to sacrifice the TCP connection. We don't want that.\n response.resume();\n if (this[kRequest]) {\n this[kCancelTimeouts]();\n // eslint-disable-next-line @typescript-eslint/no-dynamic-delete\n delete this[kRequest];\n this[kUnproxyEvents]();\n }\n const shouldBeGet = statusCode === 303 && options.method !== 'GET' && options.method !== 'HEAD';\n if (shouldBeGet || !options.methodRewriting) {\n // Server responded with \"see other\", indicating that the resource exists at another location,\n // and the client should request it from that location via GET or HEAD.\n options.method = 'GET';\n if ('body' in options) {\n delete options.body;\n }\n if ('json' in options) {\n delete options.json;\n }\n if ('form' in options) {\n delete options.form;\n }\n this[kBody] = undefined;\n delete options.headers['content-length'];\n }\n if (this.redirects.length >= options.maxRedirects) {\n this._beforeError(new MaxRedirectsError(this));\n return;\n }\n try {\n // Do not remove. See https://github.com/sindresorhus/got/pull/214\n const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();\n // Handles invalid URLs. 
See https://github.com/sindresorhus/got/issues/604\n const redirectUrl = new url_1.URL(redirectBuffer, url);\n const redirectString = redirectUrl.toString();\n decodeURI(redirectString);\n // Redirecting to a different site, clear sensitive data.\n if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) {\n if ('host' in options.headers) {\n delete options.headers.host;\n }\n if ('cookie' in options.headers) {\n delete options.headers.cookie;\n }\n if ('authorization' in options.headers) {\n delete options.headers.authorization;\n }\n if (options.username || options.password) {\n options.username = '';\n options.password = '';\n }\n }\n else {\n redirectUrl.username = options.username;\n redirectUrl.password = options.password;\n }\n this.redirects.push(redirectString);\n options.url = redirectUrl;\n for (const hook of options.hooks.beforeRedirect) {\n // eslint-disable-next-line no-await-in-loop\n await hook(options, typedResponse);\n }\n this.emit('redirect', typedResponse, options);\n await this._makeRequest();\n }\n catch (error) {\n this._beforeError(error);\n return;\n }\n return;\n }\n if (options.isStream && options.throwHttpErrors && !is_response_ok_1.isResponseOk(typedResponse)) {\n this._beforeError(new HTTPError(typedResponse));\n return;\n }\n response.on('readable', () => {\n if (this[kTriggerRead]) {\n this._read();\n }\n });\n this.on('resume', () => {\n response.resume();\n });\n this.on('pause', () => {\n response.pause();\n });\n response.once('end', () => {\n this.push(null);\n });\n this.emit('response', response);\n for (const destination of this[kServerResponsesPiped]) {\n if (destination.headersSent) {\n continue;\n }\n // eslint-disable-next-line guard-for-in\n for (const key in response.headers) {\n const isAllowed = options.decompress ? key !== 'content-encoding' : true;\n const value = response.headers[key];\n if (isAllowed) {\n destination.setHeader(key, value);\n }\n }\n destination.statusCode = statusCode;\n }\n }\n async _onResponse(response) {\n try {\n await this._onResponseBase(response);\n }\n catch (error) {\n /* istanbul ignore next: better safe than sorry */\n this._beforeError(error);\n }\n }\n _onRequest(request) {\n const { options } = this;\n const { timeout, url } = options;\n http_timer_1.default(request);\n this[kCancelTimeouts] = timed_out_1.default(request, timeout, url);\n const responseEventName = options.cache ? 'cacheableResponse' : 'response';\n request.once(responseEventName, (response) => {\n void this._onResponse(response);\n });\n request.once('error', (error) => {\n var _a;\n // Force clean-up, because some packages (e.g. nock) don't do this.\n request.destroy();\n // Node.js <= 12.18.2 mistakenly emits the response `end` first.\n (_a = request.res) === null || _a === void 0 ? void 0 : _a.removeAllListeners('end');\n error = error instanceof timed_out_1.TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this);\n this._beforeError(error);\n });\n this[kUnproxyEvents] = proxy_events_1.default(request, this, proxiedRequestEvents);\n this[kRequest] = request;\n this.emit('uploadProgress', this.uploadProgress);\n // Send body\n const body = this[kBody];\n const currentRequest = this.redirects.length === 0 ? 
this : request;\n if (is_1.default.nodeStream(body)) {\n body.pipe(currentRequest);\n body.once('error', (error) => {\n this._beforeError(new UploadError(error, this));\n });\n }\n else {\n this._unlockWrite();\n if (!is_1.default.undefined(body)) {\n this._writeRequest(body, undefined, () => { });\n currentRequest.end();\n this._lockWrite();\n }\n else if (this._cannotHaveBody || this._noPipe) {\n currentRequest.end();\n this._lockWrite();\n }\n }\n this.emit('request', request);\n }\n async _createCacheableRequest(url, options) {\n return new Promise((resolve, reject) => {\n // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed\n Object.assign(options, url_to_options_1.default(url));\n // `http-cache-semantics` checks this\n // TODO: Fix this ignore.\n // @ts-expect-error\n delete options.url;\n let request;\n // This is ugly\n const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => {\n // TODO: Fix `cacheable-response`\n response._readableState.autoDestroy = false;\n if (request) {\n (await request).emit('cacheableResponse', response);\n }\n resolve(response);\n });\n // Restore options\n options.url = url;\n cacheRequest.once('error', reject);\n cacheRequest.once('request', async (requestOrPromise) => {\n request = requestOrPromise;\n resolve(request);\n });\n });\n }\n async _makeRequest() {\n var _a, _b, _c, _d, _e;\n const { options } = this;\n const { headers } = options;\n for (const key in headers) {\n if (is_1.default.undefined(headers[key])) {\n // eslint-disable-next-line @typescript-eslint/no-dynamic-delete\n delete headers[key];\n }\n else if (is_1.default.null_(headers[key])) {\n throw new TypeError(`Use \\`undefined\\` instead of \\`null\\` to delete the \\`${key}\\` header`);\n }\n }\n if (options.decompress && is_1.default.undefined(headers['accept-encoding'])) {\n headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate';\n }\n // Set cookies\n if (options.cookieJar) {\n const cookieString = await options.cookieJar.getCookieString(options.url.toString());\n if (is_1.default.nonEmptyString(cookieString)) {\n options.headers.cookie = cookieString;\n }\n }\n for (const hook of options.hooks.beforeRequest) {\n // eslint-disable-next-line no-await-in-loop\n const result = await hook(options);\n if (!is_1.default.undefined(result)) {\n // @ts-expect-error Skip the type mismatch to support abstract responses\n options.request = () => result;\n break;\n }\n }\n if (options.body && this[kBody] !== options.body) {\n this[kBody] = options.body;\n }\n const { agent, request, timeout, url } = options;\n if (options.dnsCache && !('lookup' in options)) {\n options.lookup = options.dnsCache.lookup;\n }\n // UNIX sockets\n if (url.hostname === 'unix') {\n const matches = /(?.+?):(?.+)/.exec(`${url.pathname}${url.search}`);\n if (matches === null || matches === void 0 ? void 0 : matches.groups) {\n const { socketPath, path } = matches.groups;\n Object.assign(options, {\n socketPath,\n path,\n host: ''\n });\n }\n }\n const isHttps = url.protocol === 'https:';\n // Fallback function\n let fallbackFn;\n if (options.http2) {\n fallbackFn = http2wrapper.auto;\n }\n else {\n fallbackFn = isHttps ? https.request : http.request;\n }\n const realFn = (_a = options.request) !== null && _a !== void 0 ? _a : fallbackFn;\n // Cache support\n const fn = options.cache ? this._createCacheableRequest : realFn;\n // Pass an agent directly when HTTP2 is disabled\n if (agent && !options.http2) {\n options.agent = agent[isHttps ? 
'https' : 'http'];\n }\n // Prepare plain HTTP request options\n options[kRequest] = realFn;\n delete options.request;\n // TODO: Fix this ignore.\n // @ts-expect-error\n delete options.timeout;\n const requestOptions = options;\n requestOptions.shared = (_b = options.cacheOptions) === null || _b === void 0 ? void 0 : _b.shared;\n requestOptions.cacheHeuristic = (_c = options.cacheOptions) === null || _c === void 0 ? void 0 : _c.cacheHeuristic;\n requestOptions.immutableMinTimeToLive = (_d = options.cacheOptions) === null || _d === void 0 ? void 0 : _d.immutableMinTimeToLive;\n requestOptions.ignoreCargoCult = (_e = options.cacheOptions) === null || _e === void 0 ? void 0 : _e.ignoreCargoCult;\n // If `dnsLookupIpVersion` is not present do not override `family`\n if (options.dnsLookupIpVersion !== undefined) {\n try {\n requestOptions.family = dns_ip_version_1.dnsLookupIpVersionToFamily(options.dnsLookupIpVersion);\n }\n catch (_f) {\n throw new Error('Invalid `dnsLookupIpVersion` option value');\n }\n }\n // HTTPS options remapping\n if (options.https) {\n if ('rejectUnauthorized' in options.https) {\n requestOptions.rejectUnauthorized = options.https.rejectUnauthorized;\n }\n if (options.https.checkServerIdentity) {\n requestOptions.checkServerIdentity = options.https.checkServerIdentity;\n }\n if (options.https.certificateAuthority) {\n requestOptions.ca = options.https.certificateAuthority;\n }\n if (options.https.certificate) {\n requestOptions.cert = options.https.certificate;\n }\n if (options.https.key) {\n requestOptions.key = options.https.key;\n }\n if (options.https.passphrase) {\n requestOptions.passphrase = options.https.passphrase;\n }\n if (options.https.pfx) {\n requestOptions.pfx = options.https.pfx;\n }\n }\n try {\n let requestOrResponse = await fn(url, requestOptions);\n if (is_1.default.undefined(requestOrResponse)) {\n requestOrResponse = fallbackFn(url, requestOptions);\n }\n // Restore options\n options.request = request;\n options.timeout = timeout;\n options.agent = agent;\n // HTTPS options restore\n if (options.https) {\n if ('rejectUnauthorized' in options.https) {\n delete requestOptions.rejectUnauthorized;\n }\n if (options.https.checkServerIdentity) {\n // @ts-expect-error - This one will be removed when we remove the alias.\n delete requestOptions.checkServerIdentity;\n }\n if (options.https.certificateAuthority) {\n delete requestOptions.ca;\n }\n if (options.https.certificate) {\n delete requestOptions.cert;\n }\n if (options.https.key) {\n delete requestOptions.key;\n }\n if (options.https.passphrase) {\n delete requestOptions.passphrase;\n }\n if (options.https.pfx) {\n delete requestOptions.pfx;\n }\n }\n if (isClientRequest(requestOrResponse)) {\n this._onRequest(requestOrResponse);\n // Emit the response after the stream has been ended\n }\n else if (this.writable) {\n this.once('finish', () => {\n void this._onResponse(requestOrResponse);\n });\n this._unlockWrite();\n this.end();\n this._lockWrite();\n }\n else {\n void this._onResponse(requestOrResponse);\n }\n }\n catch (error) {\n if (error instanceof CacheableRequest.CacheError) {\n throw new CacheError(error, this);\n }\n throw new RequestError(error.message, error, this);\n }\n }\n async _error(error) {\n try {\n for (const hook of this.options.hooks.beforeError) {\n // eslint-disable-next-line no-await-in-loop\n error = await hook(error);\n }\n }\n catch (error_) {\n error = new RequestError(error_.message, error_, this);\n }\n this.destroy(error);\n }\n _beforeError(error) {\n if 
(this[kStopReading]) {\n return;\n }\n const { options } = this;\n const retryCount = this.retryCount + 1;\n this[kStopReading] = true;\n if (!(error instanceof RequestError)) {\n error = new RequestError(error.message, error, this);\n }\n const typedError = error;\n const { response } = typedError;\n void (async () => {\n if (response && !response.body) {\n response.setEncoding(this._readableState.encoding);\n try {\n response.rawBody = await get_buffer_1.default(response);\n response.body = response.rawBody.toString();\n }\n catch (_a) { }\n }\n if (this.listenerCount('retry') !== 0) {\n let backoff;\n try {\n let retryAfter;\n if (response && 'retry-after' in response.headers) {\n retryAfter = Number(response.headers['retry-after']);\n if (Number.isNaN(retryAfter)) {\n retryAfter = Date.parse(response.headers['retry-after']) - Date.now();\n if (retryAfter <= 0) {\n retryAfter = 1;\n }\n }\n else {\n retryAfter *= 1000;\n }\n }\n backoff = await options.retry.calculateDelay({\n attemptCount: retryCount,\n retryOptions: options.retry,\n error: typedError,\n retryAfter,\n computedValue: calculate_retry_delay_1.default({\n attemptCount: retryCount,\n retryOptions: options.retry,\n error: typedError,\n retryAfter,\n computedValue: 0\n })\n });\n }\n catch (error_) {\n void this._error(new RequestError(error_.message, error_, this));\n return;\n }\n if (backoff) {\n const retry = async () => {\n try {\n for (const hook of this.options.hooks.beforeRetry) {\n // eslint-disable-next-line no-await-in-loop\n await hook(this.options, typedError, retryCount);\n }\n }\n catch (error_) {\n void this._error(new RequestError(error_.message, error, this));\n return;\n }\n // Something forced us to abort the retry\n if (this.destroyed) {\n return;\n }\n this.destroy();\n this.emit('retry', retryCount, error);\n };\n this[kRetryTimeout] = setTimeout(retry, backoff);\n return;\n }\n }\n void this._error(typedError);\n })();\n }\n _read() {\n this[kTriggerRead] = true;\n const response = this[kResponse];\n if (response && !this[kStopReading]) {\n // We cannot put this in the `if` above\n // because `.read()` also triggers the `end` event\n if (response.readableLength) {\n this[kTriggerRead] = false;\n }\n let data;\n while ((data = response.read()) !== null) {\n this[kDownloadedSize] += data.length;\n this[kStartedReading] = true;\n const progress = this.downloadProgress;\n if (progress.percent < 1) {\n this.emit('downloadProgress', progress);\n }\n this.push(data);\n }\n }\n }\n // Node.js 12 has incorrect types, so the encoding must be a string\n _write(chunk, encoding, callback) {\n const write = () => {\n this._writeRequest(chunk, encoding, callback);\n };\n if (this.requestInitialized) {\n write();\n }\n else {\n this[kJobs].push(write);\n }\n }\n _writeRequest(chunk, encoding, callback) {\n if (this[kRequest].destroyed) {\n // Probably the `ClientRequest` instance will throw\n return;\n }\n this._progressCallbacks.push(() => {\n this[kUploadedSize] += Buffer.byteLength(chunk, encoding);\n const progress = this.uploadProgress;\n if (progress.percent < 1) {\n this.emit('uploadProgress', progress);\n }\n });\n // TODO: What happens if it's from cache? 
Then this[kRequest] won't be defined.\n this[kRequest].write(chunk, encoding, (error) => {\n if (!error && this._progressCallbacks.length > 0) {\n this._progressCallbacks.shift()();\n }\n callback(error);\n });\n }\n _final(callback) {\n const endRequest = () => {\n // FIX: Node.js 10 calls the write callback AFTER the end callback!\n while (this._progressCallbacks.length !== 0) {\n this._progressCallbacks.shift()();\n }\n // We need to check if `this[kRequest]` is present,\n // because it isn't when we use cache.\n if (!(kRequest in this)) {\n callback();\n return;\n }\n if (this[kRequest].destroyed) {\n callback();\n return;\n }\n this[kRequest].end((error) => {\n if (!error) {\n this[kBodySize] = this[kUploadedSize];\n this.emit('uploadProgress', this.uploadProgress);\n this[kRequest].emit('upload-complete');\n }\n callback(error);\n });\n };\n if (this.requestInitialized) {\n endRequest();\n }\n else {\n this[kJobs].push(endRequest);\n }\n }\n _destroy(error, callback) {\n var _a;\n this[kStopReading] = true;\n // Prevent further retries\n clearTimeout(this[kRetryTimeout]);\n if (kRequest in this) {\n this[kCancelTimeouts]();\n // TODO: Remove the next `if` when these get fixed:\n // - https://github.com/nodejs/node/issues/32851\n if (!((_a = this[kResponse]) === null || _a === void 0 ? void 0 : _a.complete)) {\n this[kRequest].destroy();\n }\n }\n if (error !== null && !is_1.default.undefined(error) && !(error instanceof RequestError)) {\n error = new RequestError(error.message, error, this);\n }\n callback(error);\n }\n get _isAboutToError() {\n return this[kStopReading];\n }\n /**\n The remote IP address.\n */\n get ip() {\n var _a;\n return (_a = this.socket) === null || _a === void 0 ? void 0 : _a.remoteAddress;\n }\n /**\n Indicates whether the request has been aborted or not.\n */\n get aborted() {\n var _a, _b, _c;\n return ((_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.destroyed) !== null && _b !== void 0 ? _b : this.destroyed) && !((_c = this[kOriginalResponse]) === null || _c === void 0 ? void 0 : _c.complete);\n }\n get socket() {\n var _a, _b;\n return (_b = (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.socket) !== null && _b !== void 0 ? 
_b : undefined;\n }\n /**\n Progress event for downloading (receiving a response).\n */\n get downloadProgress() {\n let percent;\n if (this[kResponseSize]) {\n percent = this[kDownloadedSize] / this[kResponseSize];\n }\n else if (this[kResponseSize] === this[kDownloadedSize]) {\n percent = 1;\n }\n else {\n percent = 0;\n }\n return {\n percent,\n transferred: this[kDownloadedSize],\n total: this[kResponseSize]\n };\n }\n /**\n Progress event for uploading (sending a request).\n */\n get uploadProgress() {\n let percent;\n if (this[kBodySize]) {\n percent = this[kUploadedSize] / this[kBodySize];\n }\n else if (this[kBodySize] === this[kUploadedSize]) {\n percent = 1;\n }\n else {\n percent = 0;\n }\n return {\n percent,\n transferred: this[kUploadedSize],\n total: this[kBodySize]\n };\n }\n /**\n The object contains the following properties:\n\n - `start` - Time when the request started.\n - `socket` - Time when a socket was assigned to the request.\n - `lookup` - Time when the DNS lookup finished.\n - `connect` - Time when the socket successfully connected.\n - `secureConnect` - Time when the socket securely connected.\n - `upload` - Time when the request finished uploading.\n - `response` - Time when the request fired `response` event.\n - `end` - Time when the response fired `end` event.\n - `error` - Time when the request fired `error` event.\n - `abort` - Time when the request fired `abort` event.\n - `phases`\n - `wait` - `timings.socket - timings.start`\n - `dns` - `timings.lookup - timings.socket`\n - `tcp` - `timings.connect - timings.lookup`\n - `tls` - `timings.secureConnect - timings.connect`\n - `request` - `timings.upload - (timings.secureConnect || timings.connect)`\n - `firstByte` - `timings.response - timings.upload`\n - `download` - `timings.end - timings.response`\n - `total` - `(timings.end || timings.error || timings.abort) - timings.start`\n\n If something has not been measured yet, it will be `undefined`.\n\n __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.\n */\n get timings() {\n var _a;\n return (_a = this[kRequest]) === null || _a === void 0 ? void 0 : _a.timings;\n }\n /**\n Whether the response was retrieved from the cache.\n */\n get isFromCache() {\n return this[kIsFromCache];\n }\n pipe(destination, options) {\n if (this[kStartedReading]) {\n throw new Error('Failed to pipe. 
The response has been emitted already.');\n }\n if (destination instanceof http_1.ServerResponse) {\n this[kServerResponsesPiped].add(destination);\n }\n return super.pipe(destination, options);\n }\n unpipe(destination) {\n if (destination instanceof http_1.ServerResponse) {\n this[kServerResponsesPiped].delete(destination);\n }\n super.unpipe(destination);\n return this;\n }\n}\nexports.default = Request;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.dnsLookupIpVersionToFamily = exports.isDnsLookupIpVersion = void 0;\nconst conversionTable = {\n auto: 0,\n ipv4: 4,\n ipv6: 6\n};\nexports.isDnsLookupIpVersion = (value) => {\n return value in conversionTable;\n};\nexports.dnsLookupIpVersionToFamily = (dnsLookupIpVersion) => {\n if (exports.isDnsLookupIpVersion(dnsLookupIpVersion)) {\n return conversionTable[dnsLookupIpVersion];\n }\n throw new Error('Invalid DNS lookup IP version');\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst fs_1 = require(\"fs\");\nconst util_1 = require(\"util\");\nconst is_1 = require(\"@sindresorhus/is\");\nconst is_form_data_1 = require(\"./is-form-data\");\nconst statAsync = util_1.promisify(fs_1.stat);\nexports.default = async (body, headers) => {\n if (headers && 'content-length' in headers) {\n return Number(headers['content-length']);\n }\n if (!body) {\n return 0;\n }\n if (is_1.default.string(body)) {\n return Buffer.byteLength(body);\n }\n if (is_1.default.buffer(body)) {\n return body.length;\n }\n if (is_form_data_1.default(body)) {\n return util_1.promisify(body.getLength.bind(body))();\n }\n if (body instanceof fs_1.ReadStream) {\n const { size } = await statAsync(body.path);\n if (size === 0) {\n return undefined;\n }\n return size;\n }\n return undefined;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// TODO: Update https://github.com/sindresorhus/get-stream\nconst getBuffer = async (stream) => {\n const chunks = [];\n let length = 0;\n for await (const chunk of stream) {\n chunks.push(chunk);\n length += Buffer.byteLength(chunk);\n }\n if (Buffer.isBuffer(chunks[0])) {\n return Buffer.concat(chunks, length);\n }\n return Buffer.from(chunks.join(''));\n};\nexports.default = getBuffer;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nexports.default = (body) => is_1.default.nodeStream(body) && is_1.default.function_(body.getBoundary);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isResponseOk = void 0;\nexports.isResponseOk = (response) => {\n const { statusCode } = response;\n const limitStatusCode = response.request.options.followRedirect ? 
299 : 399;\n return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/* istanbul ignore file: deprecated */\nconst url_1 = require(\"url\");\nconst keys = [\n 'protocol',\n 'host',\n 'hostname',\n 'port',\n 'pathname',\n 'search'\n];\nexports.default = (origin, options) => {\n var _a, _b;\n if (options.path) {\n if (options.pathname) {\n throw new TypeError('Parameters `path` and `pathname` are mutually exclusive.');\n }\n if (options.search) {\n throw new TypeError('Parameters `path` and `search` are mutually exclusive.');\n }\n if (options.searchParams) {\n throw new TypeError('Parameters `path` and `searchParams` are mutually exclusive.');\n }\n }\n if (options.search && options.searchParams) {\n throw new TypeError('Parameters `search` and `searchParams` are mutually exclusive.');\n }\n if (!origin) {\n if (!options.protocol) {\n throw new TypeError('No URL protocol specified');\n }\n origin = `${options.protocol}//${(_b = (_a = options.hostname) !== null && _a !== void 0 ? _a : options.host) !== null && _b !== void 0 ? _b : ''}`;\n }\n const url = new url_1.URL(origin);\n if (options.path) {\n const searchIndex = options.path.indexOf('?');\n if (searchIndex === -1) {\n options.pathname = options.path;\n }\n else {\n options.pathname = options.path.slice(0, searchIndex);\n options.search = options.path.slice(searchIndex + 1);\n }\n delete options.path;\n }\n for (const key of keys) {\n if (options[key]) {\n url[key] = options[key].toString();\n }\n }\n return url;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction default_1(from, to, events) {\n const fns = {};\n for (const event of events) {\n fns[event] = (...args) => {\n to.emit(event, ...args);\n };\n from.on(event, fns[event]);\n }\n return () => {\n for (const event of events) {\n from.off(event, fns[event]);\n }\n };\n}\nexports.default = default_1;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.TimeoutError = void 0;\nconst net = require(\"net\");\nconst unhandle_1 = require(\"./unhandle\");\nconst reentry = Symbol('reentry');\nconst noop = () => { };\nclass TimeoutError extends Error {\n constructor(threshold, event) {\n super(`Timeout awaiting '${event}' for ${threshold}ms`);\n this.event = event;\n this.name = 'TimeoutError';\n this.code = 'ETIMEDOUT';\n }\n}\nexports.TimeoutError = TimeoutError;\nexports.default = (request, delays, options) => {\n if (reentry in request) {\n return noop;\n }\n request[reentry] = true;\n const cancelers = [];\n const { once, unhandleAll } = unhandle_1.default();\n const addTimeout = (delay, callback, event) => {\n var _a;\n const timeout = setTimeout(callback, delay, delay, event);\n (_a = timeout.unref) === null || _a === void 0 ? 
void 0 : _a.call(timeout);\n const cancel = () => {\n clearTimeout(timeout);\n };\n cancelers.push(cancel);\n return cancel;\n };\n const { host, hostname } = options;\n const timeoutHandler = (delay, event) => {\n request.destroy(new TimeoutError(delay, event));\n };\n const cancelTimeouts = () => {\n for (const cancel of cancelers) {\n cancel();\n }\n unhandleAll();\n };\n request.once('error', error => {\n cancelTimeouts();\n // Save original behavior\n /* istanbul ignore next */\n if (request.listenerCount('error') === 0) {\n throw error;\n }\n });\n request.once('close', cancelTimeouts);\n once(request, 'response', (response) => {\n once(response, 'end', cancelTimeouts);\n });\n if (typeof delays.request !== 'undefined') {\n addTimeout(delays.request, timeoutHandler, 'request');\n }\n if (typeof delays.socket !== 'undefined') {\n const socketTimeoutHandler = () => {\n timeoutHandler(delays.socket, 'socket');\n };\n request.setTimeout(delays.socket, socketTimeoutHandler);\n // `request.setTimeout(0)` causes a memory leak.\n // We can just remove the listener and forget about the timer - it's unreffed.\n // See https://github.com/sindresorhus/got/issues/690\n cancelers.push(() => {\n request.removeListener('timeout', socketTimeoutHandler);\n });\n }\n once(request, 'socket', (socket) => {\n var _a;\n const { socketPath } = request;\n /* istanbul ignore next: hard to test */\n if (socket.connecting) {\n const hasPath = Boolean(socketPath !== null && socketPath !== void 0 ? socketPath : net.isIP((_a = hostname !== null && hostname !== void 0 ? hostname : host) !== null && _a !== void 0 ? _a : '') !== 0);\n if (typeof delays.lookup !== 'undefined' && !hasPath && typeof socket.address().address === 'undefined') {\n const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');\n once(socket, 'lookup', cancelTimeout);\n }\n if (typeof delays.connect !== 'undefined') {\n const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');\n if (hasPath) {\n once(socket, 'connect', timeConnect());\n }\n else {\n once(socket, 'lookup', (error) => {\n if (error === null) {\n once(socket, 'connect', timeConnect());\n }\n });\n }\n }\n if (typeof delays.secureConnect !== 'undefined' && options.protocol === 'https:') {\n once(socket, 'connect', () => {\n const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');\n once(socket, 'secureConnect', cancelTimeout);\n });\n }\n }\n if (typeof delays.send !== 'undefined') {\n const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');\n /* istanbul ignore next: hard to test */\n if (socket.connecting) {\n once(socket, 'connect', () => {\n once(request, 'upload-complete', timeRequest());\n });\n }\n else {\n once(request, 'upload-complete', timeRequest());\n }\n }\n });\n if (typeof delays.response !== 'undefined') {\n once(request, 'upload-complete', () => {\n const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');\n once(request, 'response', cancelTimeout);\n });\n }\n return cancelTimeouts;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n// When attaching listeners, it's very easy to forget about them.\n// Especially if you do error handling and set timeouts.\n// So instead of checking if it's proper to throw an error on every timeout ever,\n// use this simple tool which will remove all listeners you have attached.\nexports.default = () => {\n const handlers = [];\n return {\n once(origin, event, fn) {\n 
origin.once(event, fn);\n handlers.push({ origin, event, fn });\n },\n unhandleAll() {\n for (const handler of handlers) {\n const { origin, event, fn } = handler;\n origin.removeListener(event, fn);\n }\n handlers.length = 0;\n }\n };\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = require(\"@sindresorhus/is\");\nexports.default = (url) => {\n // Cast to URL\n url = url;\n const options = {\n protocol: url.protocol,\n hostname: is_1.default.string(url.hostname) && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,\n host: url.host,\n hash: url.hash,\n search: url.search,\n pathname: url.pathname,\n href: url.href,\n path: `${url.pathname || ''}${url.search || ''}`\n };\n if (is_1.default.string(url.port) && url.port.length > 0) {\n options.port = Number(url.port);\n }\n if (url.username || url.password) {\n options.auth = `${url.username || ''}:${url.password || ''}`;\n }\n return options;\n};\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass WeakableMap {\n constructor() {\n this.weakMap = new WeakMap();\n this.map = new Map();\n }\n set(key, value) {\n if (typeof key === 'object') {\n this.weakMap.set(key, value);\n }\n else {\n this.map.set(key, value);\n }\n }\n get(key) {\n if (typeof key === 'object') {\n return this.weakMap.get(key);\n }\n return this.map.get(key);\n }\n has(key) {\n if (typeof key === 'object') {\n return this.weakMap.has(key);\n }\n return this.map.has(key);\n }\n}\nexports.default = WeakableMap;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.defaultHandler = void 0;\nconst is_1 = require(\"@sindresorhus/is\");\nconst as_promise_1 = require(\"./as-promise\");\nconst create_rejection_1 = require(\"./as-promise/create-rejection\");\nconst core_1 = require(\"./core\");\nconst deep_freeze_1 = require(\"./utils/deep-freeze\");\nconst errors = {\n RequestError: as_promise_1.RequestError,\n CacheError: as_promise_1.CacheError,\n ReadError: as_promise_1.ReadError,\n HTTPError: as_promise_1.HTTPError,\n MaxRedirectsError: as_promise_1.MaxRedirectsError,\n TimeoutError: as_promise_1.TimeoutError,\n ParseError: as_promise_1.ParseError,\n CancelError: as_promise_1.CancelError,\n UnsupportedProtocolError: as_promise_1.UnsupportedProtocolError,\n UploadError: as_promise_1.UploadError\n};\n// The `delay` package weighs 10KB (!)\nconst delay = async (ms) => new Promise(resolve => {\n setTimeout(resolve, ms);\n});\nconst { normalizeArguments } = core_1.default;\nconst mergeOptions = (...sources) => {\n let mergedOptions;\n for (const source of sources) {\n mergedOptions = normalizeArguments(undefined, source, mergedOptions);\n }\n return mergedOptions;\n};\nconst getPromiseOrStream = (options) => options.isStream ? 
new core_1.default(undefined, options) : as_promise_1.default(options);\nconst isGotInstance = (value) => ('defaults' in value && 'options' in value.defaults);\nconst aliases = [\n 'get',\n 'post',\n 'put',\n 'patch',\n 'head',\n 'delete'\n];\nexports.defaultHandler = (options, next) => next(options);\nconst callInitHooks = (hooks, options) => {\n if (hooks) {\n for (const hook of hooks) {\n hook(options);\n }\n }\n};\nconst create = (defaults) => {\n // Proxy properties from next handlers\n defaults._rawHandlers = defaults.handlers;\n defaults.handlers = defaults.handlers.map(fn => ((options, next) => {\n // This will be assigned by assigning result\n let root;\n const result = fn(options, newOptions => {\n root = next(newOptions);\n return root;\n });\n if (result !== root && !options.isStream && root) {\n const typedResult = result;\n const { then: promiseThen, catch: promiseCatch, finally: promiseFianlly } = typedResult;\n Object.setPrototypeOf(typedResult, Object.getPrototypeOf(root));\n Object.defineProperties(typedResult, Object.getOwnPropertyDescriptors(root));\n // These should point to the new promise\n // eslint-disable-next-line promise/prefer-await-to-then\n typedResult.then = promiseThen;\n typedResult.catch = promiseCatch;\n typedResult.finally = promiseFianlly;\n }\n return result;\n }));\n // Got interface\n const got = ((url, options = {}, _defaults) => {\n var _a, _b;\n let iteration = 0;\n const iterateHandlers = (newOptions) => {\n return defaults.handlers[iteration++](newOptions, iteration === defaults.handlers.length ? getPromiseOrStream : iterateHandlers);\n };\n // TODO: Remove this in Got 12.\n if (is_1.default.plainObject(url)) {\n const mergedOptions = {\n ...url,\n ...options\n };\n core_1.setNonEnumerableProperties([url, options], mergedOptions);\n options = mergedOptions;\n url = undefined;\n }\n try {\n // Call `init` hooks\n let initHookError;\n try {\n callInitHooks(defaults.options.hooks.init, options);\n callInitHooks((_a = options.hooks) === null || _a === void 0 ? void 0 : _a.init, options);\n }\n catch (error) {\n initHookError = error;\n }\n // Normalize options & call handlers\n const normalizedOptions = normalizeArguments(url, options, _defaults !== null && _defaults !== void 0 ? _defaults : defaults.options);\n normalizedOptions[core_1.kIsNormalizedAlready] = true;\n if (initHookError) {\n throw new as_promise_1.RequestError(initHookError.message, initHookError, normalizedOptions);\n }\n return iterateHandlers(normalizedOptions);\n }\n catch (error) {\n if (options.isStream) {\n throw error;\n }\n else {\n return create_rejection_1.default(error, defaults.options.hooks.beforeError, (_b = options.hooks) === null || _b === void 0 ? 
void 0 : _b.beforeError);\n }\n }\n });\n got.extend = (...instancesOrOptions) => {\n const optionsArray = [defaults.options];\n let handlers = [...defaults._rawHandlers];\n let isMutableDefaults;\n for (const value of instancesOrOptions) {\n if (isGotInstance(value)) {\n optionsArray.push(value.defaults.options);\n handlers.push(...value.defaults._rawHandlers);\n isMutableDefaults = value.defaults.mutableDefaults;\n }\n else {\n optionsArray.push(value);\n if ('handlers' in value) {\n handlers.push(...value.handlers);\n }\n isMutableDefaults = value.mutableDefaults;\n }\n }\n handlers = handlers.filter(handler => handler !== exports.defaultHandler);\n if (handlers.length === 0) {\n handlers.push(exports.defaultHandler);\n }\n return create({\n options: mergeOptions(...optionsArray),\n handlers,\n mutableDefaults: Boolean(isMutableDefaults)\n });\n };\n // Pagination\n const paginateEach = (async function* (url, options) {\n // TODO: Remove this `@ts-expect-error` when upgrading to TypeScript 4.\n // Error: Argument of type 'Merge> | undefined' is not assignable to parameter of type 'Options | undefined'.\n // @ts-expect-error\n let normalizedOptions = normalizeArguments(url, options, defaults.options);\n normalizedOptions.resolveBodyOnly = false;\n const pagination = normalizedOptions.pagination;\n if (!is_1.default.object(pagination)) {\n throw new TypeError('`options.pagination` must be implemented');\n }\n const all = [];\n let { countLimit } = pagination;\n let numberOfRequests = 0;\n while (numberOfRequests < pagination.requestLimit) {\n if (numberOfRequests !== 0) {\n // eslint-disable-next-line no-await-in-loop\n await delay(pagination.backoff);\n }\n // @ts-expect-error FIXME!\n // TODO: Throw when result is not an instance of Response\n // eslint-disable-next-line no-await-in-loop\n const result = (await got(undefined, undefined, normalizedOptions));\n // eslint-disable-next-line no-await-in-loop\n const parsed = await pagination.transform(result);\n const current = [];\n for (const item of parsed) {\n if (pagination.filter(item, all, current)) {\n if (!pagination.shouldContinue(item, all, current)) {\n return;\n }\n yield item;\n if (pagination.stackAllItems) {\n all.push(item);\n }\n current.push(item);\n if (--countLimit <= 0) {\n return;\n }\n }\n }\n const optionsToMerge = pagination.paginate(result, all, current);\n if (optionsToMerge === false) {\n return;\n }\n if (optionsToMerge === result.request.options) {\n normalizedOptions = result.request.options;\n }\n else if (optionsToMerge !== undefined) {\n normalizedOptions = normalizeArguments(undefined, optionsToMerge, normalizedOptions);\n }\n numberOfRequests++;\n }\n });\n got.paginate = paginateEach;\n got.paginate.all = (async (url, options) => {\n const results = [];\n for await (const item of paginateEach(url, options)) {\n results.push(item);\n }\n return results;\n });\n // For those who like very descriptive names\n got.paginate.each = paginateEach;\n // Stream API\n got.stream = ((url, options) => got(url, { ...options, isStream: true }));\n // Shortcuts\n for (const method of aliases) {\n got[method] = ((url, options) => got(url, { ...options, method }));\n got.stream[method] = ((url, options) => {\n return got(url, { ...options, method, isStream: true });\n });\n }\n Object.assign(got, errors);\n Object.defineProperty(got, 'defaults', {\n value: defaults.mutableDefaults ? 
defaults : deep_freeze_1.default(defaults),\n writable: defaults.mutableDefaults,\n configurable: defaults.mutableDefaults,\n enumerable: true\n });\n got.mergeOptions = mergeOptions;\n return got;\n};\nexports.default = create;\n__exportStar(require(\"./types\"), exports);\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst url_1 = require(\"url\");\nconst create_1 = require(\"./create\");\nconst defaults = {\n options: {\n method: 'GET',\n retry: {\n limit: 2,\n methods: [\n 'GET',\n 'PUT',\n 'HEAD',\n 'DELETE',\n 'OPTIONS',\n 'TRACE'\n ],\n statusCodes: [\n 408,\n 413,\n 429,\n 500,\n 502,\n 503,\n 504,\n 521,\n 522,\n 524\n ],\n errorCodes: [\n 'ETIMEDOUT',\n 'ECONNRESET',\n 'EADDRINUSE',\n 'ECONNREFUSED',\n 'EPIPE',\n 'ENOTFOUND',\n 'ENETUNREACH',\n 'EAI_AGAIN'\n ],\n maxRetryAfter: undefined,\n calculateDelay: ({ computedValue }) => computedValue\n },\n timeout: {},\n headers: {\n 'user-agent': 'got (https://github.com/sindresorhus/got)'\n },\n hooks: {\n init: [],\n beforeRequest: [],\n beforeRedirect: [],\n beforeRetry: [],\n beforeError: [],\n afterResponse: []\n },\n cache: undefined,\n dnsCache: undefined,\n decompress: true,\n throwHttpErrors: true,\n followRedirect: true,\n isStream: false,\n responseType: 'text',\n resolveBodyOnly: false,\n maxRedirects: 10,\n prefixUrl: '',\n methodRewriting: true,\n ignoreInvalidCookies: false,\n context: {},\n // TODO: Set this to `true` when Got 12 gets released\n http2: false,\n allowGetBody: false,\n https: undefined,\n pagination: {\n transform: (response) => {\n if (response.request.options.responseType === 'json') {\n return response.body;\n }\n return JSON.parse(response.body);\n },\n paginate: response => {\n if (!Reflect.has(response.headers, 'link')) {\n return false;\n }\n const items = response.headers.link.split(',');\n let next;\n for (const item of items) {\n const parsed = item.split(';');\n if (parsed[1].includes('next')) {\n next = parsed[0].trimStart().trim();\n next = next.slice(1, -1);\n break;\n }\n }\n if (next) {\n const options = {\n url: new url_1.URL(next)\n };\n return options;\n }\n return false;\n },\n filter: () => true,\n shouldContinue: () => true,\n countLimit: Infinity,\n backoff: 0,\n requestLimit: 10000,\n stackAllItems: true\n },\n parseJson: (text) => JSON.parse(text),\n stringifyJson: (object) => JSON.stringify(object),\n cacheOptions: {}\n },\n handlers: [create_1.defaultHandler],\n mutableDefaults: false\n};\nconst got = create_1.default(defaults);\nexports.default = got;\n// For CommonJS default export support\nmodule.exports = got;\nmodule.exports.default = got;\nmodule.exports.__esModule = true; // Workaround for TS issue: https://github.com/sindresorhus/got/pull/1267\n__exportStar(require(\"./create\"), exports);\n__exportStar(require(\"./as-promise\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst is_1 = 
require(\"@sindresorhus/is\");\nfunction deepFreeze(object) {\n for (const value of Object.values(object)) {\n if (is_1.default.plainObject(value) || is_1.default.array(value)) {\n deepFreeze(value);\n }\n }\n return Object.freeze(object);\n}\nexports.default = deepFreeze;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst alreadyWarned = new Set();\nexports.default = (message) => {\n if (alreadyWarned.has(message)) {\n return;\n }\n alreadyWarned.add(message);\n // @ts-expect-error Missing types.\n process.emitWarning(`Got: ${message}`, {\n type: 'DeprecationWarning'\n });\n};\n","'use strict';\n// rfc7231 6.1\nconst statusCodeCacheableByDefault = new Set([\n 200,\n 203,\n 204,\n 206,\n 300,\n 301,\n 404,\n 405,\n 410,\n 414,\n 501,\n]);\n\n// This implementation does not understand partial responses (206)\nconst understoodStatuses = new Set([\n 200,\n 203,\n 204,\n 300,\n 301,\n 302,\n 303,\n 307,\n 308,\n 404,\n 405,\n 410,\n 414,\n 501,\n]);\n\nconst errorStatusCodes = new Set([\n 500,\n 502,\n 503, \n 504,\n]);\n\nconst hopByHopHeaders = {\n date: true, // included, because we add Age update Date\n connection: true,\n 'keep-alive': true,\n 'proxy-authenticate': true,\n 'proxy-authorization': true,\n te: true,\n trailer: true,\n 'transfer-encoding': true,\n upgrade: true,\n};\n\nconst excludedFromRevalidationUpdate = {\n // Since the old body is reused, it doesn't make sense to change properties of the body\n 'content-length': true,\n 'content-encoding': true,\n 'transfer-encoding': true,\n 'content-range': true,\n};\n\nfunction toNumberOrZero(s) {\n const n = parseInt(s, 10);\n return isFinite(n) ? n : 0;\n}\n\n// RFC 5861\nfunction isErrorResponse(response) {\n // consider undefined response as faulty\n if(!response) {\n return true\n }\n return errorStatusCodes.has(response.status);\n}\n\nfunction parseCacheControl(header) {\n const cc = {};\n if (!header) return cc;\n\n // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),\n // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale\n const parts = header.trim().split(/\\s*,\\s*/); // TODO: lame parsing\n for (const part of parts) {\n const [k, v] = part.split(/\\s*=\\s*/, 2);\n cc[k] = v === undefined ? true : v.replace(/^\"|\"$/g, ''); // TODO: lame unquoting\n }\n\n return cc;\n}\n\nfunction formatCacheControl(cc) {\n let parts = [];\n for (const k in cc) {\n const v = cc[k];\n parts.push(v === true ? k : k + '=' + v);\n }\n if (!parts.length) {\n return undefined;\n }\n return parts.join(', ');\n}\n\nmodule.exports = class CachePolicy {\n constructor(\n req,\n res,\n {\n shared,\n cacheHeuristic,\n immutableMinTimeToLive,\n ignoreCargoCult,\n _fromObject,\n } = {}\n ) {\n if (_fromObject) {\n this._fromObject(_fromObject);\n return;\n }\n\n if (!res || !res.headers) {\n throw Error('Response headers missing');\n }\n this._assertRequestHasHeaders(req);\n\n this._responseTime = this.now();\n this._isShared = shared !== false;\n this._cacheHeuristic =\n undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE\n this._immutableMinTtl =\n undefined !== immutableMinTimeToLive\n ? immutableMinTimeToLive\n : 24 * 3600 * 1000;\n\n this._status = 'status' in res ? 
res.status : 200;\n this._resHeaders = res.headers;\n this._rescc = parseCacheControl(res.headers['cache-control']);\n this._method = 'method' in req ? req.method : 'GET';\n this._url = req.url;\n this._host = req.headers.host;\n this._noAuthorization = !req.headers.authorization;\n this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used\n this._reqcc = parseCacheControl(req.headers['cache-control']);\n\n // Assume that if someone uses legacy, non-standard uncecessary options they don't understand caching,\n // so there's no point stricly adhering to the blindly copy&pasted directives.\n if (\n ignoreCargoCult &&\n 'pre-check' in this._rescc &&\n 'post-check' in this._rescc\n ) {\n delete this._rescc['pre-check'];\n delete this._rescc['post-check'];\n delete this._rescc['no-cache'];\n delete this._rescc['no-store'];\n delete this._rescc['must-revalidate'];\n this._resHeaders = Object.assign({}, this._resHeaders, {\n 'cache-control': formatCacheControl(this._rescc),\n });\n delete this._resHeaders.expires;\n delete this._resHeaders.pragma;\n }\n\n // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive\n // as having the same effect as if \"Cache-Control: no-cache\" were present (see Section 5.2.1).\n if (\n res.headers['cache-control'] == null &&\n /no-cache/.test(res.headers.pragma)\n ) {\n this._rescc['no-cache'] = true;\n }\n }\n\n now() {\n return Date.now();\n }\n\n storable() {\n // The \"no-store\" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.\n return !!(\n !this._reqcc['no-store'] &&\n // A cache MUST NOT store a response to any request, unless:\n // The request method is understood by the cache and defined as being cacheable, and\n ('GET' === this._method ||\n 'HEAD' === this._method ||\n ('POST' === this._method && this._hasExplicitExpiration())) &&\n // the response status code is understood by the cache, and\n understoodStatuses.has(this._status) &&\n // the \"no-store\" cache directive does not appear in request or response header fields, and\n !this._rescc['no-store'] &&\n // the \"private\" response directive does not appear in the response, if the cache is shared, and\n (!this._isShared || !this._rescc.private) &&\n // the Authorization header field does not appear in the request, if the cache is shared,\n (!this._isShared ||\n this._noAuthorization ||\n this._allowsStoringAuthenticated()) &&\n // the response either:\n // contains an Expires header field, or\n (this._resHeaders.expires ||\n // contains a max-age response directive, or\n // contains a s-maxage response directive and the cache is shared, or\n // contains a public response directive.\n this._rescc['max-age'] ||\n (this._isShared && this._rescc['s-maxage']) ||\n this._rescc.public ||\n // has a status code that is defined as cacheable by default\n statusCodeCacheableByDefault.has(this._status))\n );\n }\n\n _hasExplicitExpiration() {\n // 4.2.1 Calculating Freshness Lifetime\n return (\n (this._isShared && this._rescc['s-maxage']) ||\n this._rescc['max-age'] ||\n this._resHeaders.expires\n );\n }\n\n _assertRequestHasHeaders(req) {\n if (!req || !req.headers) {\n throw Error('Request headers missing');\n }\n }\n\n satisfiesWithoutRevalidation(req) {\n this._assertRequestHasHeaders(req);\n\n // When presented with a request, a cache MUST NOT reuse a stored response, unless:\n // the presented request does not contain 
the no-cache pragma (Section 5.4), nor the no-cache cache directive,\n // unless the stored response is successfully validated (Section 4.3), and\n const requestCC = parseCacheControl(req.headers['cache-control']);\n if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {\n return false;\n }\n\n if (requestCC['max-age'] && this.age() > requestCC['max-age']) {\n return false;\n }\n\n if (\n requestCC['min-fresh'] &&\n this.timeToLive() < 1000 * requestCC['min-fresh']\n ) {\n return false;\n }\n\n // the stored response is either:\n // fresh, or allowed to be served stale\n if (this.stale()) {\n const allowsStale =\n requestCC['max-stale'] &&\n !this._rescc['must-revalidate'] &&\n (true === requestCC['max-stale'] ||\n requestCC['max-stale'] > this.age() - this.maxAge());\n if (!allowsStale) {\n return false;\n }\n }\n\n return this._requestMatches(req, false);\n }\n\n _requestMatches(req, allowHeadMethod) {\n // The presented effective request URI and that of the stored response match, and\n return (\n (!this._url || this._url === req.url) &&\n this._host === req.headers.host &&\n // the request method associated with the stored response allows it to be used for the presented request, and\n (!req.method ||\n this._method === req.method ||\n (allowHeadMethod && 'HEAD' === req.method)) &&\n // selecting header fields nominated by the stored response (if any) match those presented, and\n this._varyMatches(req)\n );\n }\n\n _allowsStoringAuthenticated() {\n // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.\n return (\n this._rescc['must-revalidate'] ||\n this._rescc.public ||\n this._rescc['s-maxage']\n );\n }\n\n _varyMatches(req) {\n if (!this._resHeaders.vary) {\n return true;\n }\n\n // A Vary header field-value of \"*\" always fails to match\n if (this._resHeaders.vary === '*') {\n return false;\n }\n\n const fields = this._resHeaders.vary\n .trim()\n .toLowerCase()\n .split(/\\s*,\\s*/);\n for (const name of fields) {\n if (req.headers[name] !== this._reqHeaders[name]) return false;\n }\n return true;\n }\n\n _copyWithoutHopByHopHeaders(inHeaders) {\n const headers = {};\n for (const name in inHeaders) {\n if (hopByHopHeaders[name]) continue;\n headers[name] = inHeaders[name];\n }\n // 9.1. Connection\n if (inHeaders.connection) {\n const tokens = inHeaders.connection.trim().split(/\\s*,\\s*/);\n for (const name of tokens) {\n delete headers[name];\n }\n }\n if (headers.warning) {\n const warnings = headers.warning.split(/,/).filter(warning => {\n return !/^\\s*1[0-9][0-9]/.test(warning);\n });\n if (!warnings.length) {\n delete headers.warning;\n } else {\n headers.warning = warnings.join(',').trim();\n }\n }\n return headers;\n }\n\n responseHeaders() {\n const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);\n const age = this.age();\n\n // A cache SHOULD generate 113 warning if it heuristically chose a freshness\n // lifetime greater than 24 hours and the response's age is greater than 24 hours.\n if (\n age > 3600 * 24 &&\n !this._hasExplicitExpiration() &&\n this.maxAge() > 3600 * 24\n ) {\n headers.warning =\n (headers.warning ? 
`${headers.warning}, ` : '') +\n '113 - \"rfc7234 5.5.4\"';\n }\n headers.age = `${Math.round(age)}`;\n headers.date = new Date(this.now()).toUTCString();\n return headers;\n }\n\n /**\n * Value of the Date response header or current time if Date was invalid\n * @return timestamp\n */\n date() {\n const serverDate = Date.parse(this._resHeaders.date);\n if (isFinite(serverDate)) {\n return serverDate;\n }\n return this._responseTime;\n }\n\n /**\n * Value of the Age header, in seconds, updated for the current time.\n * May be fractional.\n *\n * @return Number\n */\n age() {\n let age = this._ageValue();\n\n const residentTime = (this.now() - this._responseTime) / 1000;\n return age + residentTime;\n }\n\n _ageValue() {\n return toNumberOrZero(this._resHeaders.age);\n }\n\n /**\n * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.\n *\n * For an up-to-date value, see `timeToLive()`.\n *\n * @return Number\n */\n maxAge() {\n if (!this.storable() || this._rescc['no-cache']) {\n return 0;\n }\n\n // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default\n // so this implementation requires explicit opt-in via public header\n if (\n this._isShared &&\n (this._resHeaders['set-cookie'] &&\n !this._rescc.public &&\n !this._rescc.immutable)\n ) {\n return 0;\n }\n\n if (this._resHeaders.vary === '*') {\n return 0;\n }\n\n if (this._isShared) {\n if (this._rescc['proxy-revalidate']) {\n return 0;\n }\n // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.\n if (this._rescc['s-maxage']) {\n return toNumberOrZero(this._rescc['s-maxage']);\n }\n }\n\n // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.\n if (this._rescc['max-age']) {\n return toNumberOrZero(this._rescc['max-age']);\n }\n\n const defaultMinTtl = this._rescc.immutable ? 
this._immutableMinTtl : 0;\n\n const serverDate = this.date();\n if (this._resHeaders.expires) {\n const expires = Date.parse(this._resHeaders.expires);\n // A cache recipient MUST interpret invalid date formats, especially the value \"0\", as representing a time in the past (i.e., \"already expired\").\n if (Number.isNaN(expires) || expires < serverDate) {\n return 0;\n }\n return Math.max(defaultMinTtl, (expires - serverDate) / 1000);\n }\n\n if (this._resHeaders['last-modified']) {\n const lastModified = Date.parse(this._resHeaders['last-modified']);\n if (isFinite(lastModified) && serverDate > lastModified) {\n return Math.max(\n defaultMinTtl,\n ((serverDate - lastModified) / 1000) * this._cacheHeuristic\n );\n }\n }\n\n return defaultMinTtl;\n }\n\n timeToLive() {\n const age = this.maxAge() - this.age();\n const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);\n const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);\n return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;\n }\n\n stale() {\n return this.maxAge() <= this.age();\n }\n\n _useStaleIfError() {\n return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();\n }\n\n useStaleWhileRevalidate() {\n return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();\n }\n\n static fromObject(obj) {\n return new this(undefined, undefined, { _fromObject: obj });\n }\n\n _fromObject(obj) {\n if (this._responseTime) throw Error('Reinitialized');\n if (!obj || obj.v !== 1) throw Error('Invalid serialization');\n\n this._responseTime = obj.t;\n this._isShared = obj.sh;\n this._cacheHeuristic = obj.ch;\n this._immutableMinTtl =\n obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;\n this._status = obj.st;\n this._resHeaders = obj.resh;\n this._rescc = obj.rescc;\n this._method = obj.m;\n this._url = obj.u;\n this._host = obj.h;\n this._noAuthorization = obj.a;\n this._reqHeaders = obj.reqh;\n this._reqcc = obj.reqcc;\n }\n\n toObject() {\n return {\n v: 1,\n t: this._responseTime,\n sh: this._isShared,\n ch: this._cacheHeuristic,\n imm: this._immutableMinTtl,\n st: this._status,\n resh: this._resHeaders,\n rescc: this._rescc,\n m: this._method,\n u: this._url,\n h: this._host,\n a: this._noAuthorization,\n reqh: this._reqHeaders,\n reqcc: this._reqcc,\n };\n }\n\n /**\n * Headers for sending to the origin server to revalidate stale response.\n * Allows server to return 304 to allow reuse of the previous response.\n *\n * Hop by hop headers are always stripped.\n * Revalidation headers may be added or removed, depending on request.\n */\n revalidationHeaders(incomingReq) {\n this._assertRequestHasHeaders(incomingReq);\n const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);\n\n // This implementation does not understand range requests\n delete headers['if-range'];\n\n if (!this._requestMatches(incomingReq, true) || !this.storable()) {\n // revalidation allowed via HEAD\n // not for the same resource, or wasn't allowed to be cached anyway\n delete headers['if-none-match'];\n delete headers['if-modified-since'];\n return headers;\n }\n\n /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */\n if (this._resHeaders.etag) {\n headers['if-none-match'] = headers['if-none-match']\n ? 
`${headers['if-none-match']}, ${this._resHeaders.etag}`\n : this._resHeaders.etag;\n }\n\n // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.\n const forbidsWeakValidators =\n headers['accept-ranges'] ||\n headers['if-match'] ||\n headers['if-unmodified-since'] ||\n (this._method && this._method != 'GET');\n\n /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.\n Note: This implementation does not understand partial responses (206) */\n if (forbidsWeakValidators) {\n delete headers['if-modified-since'];\n\n if (headers['if-none-match']) {\n const etags = headers['if-none-match']\n .split(/,/)\n .filter(etag => {\n return !/^\\s*W\\//.test(etag);\n });\n if (!etags.length) {\n delete headers['if-none-match'];\n } else {\n headers['if-none-match'] = etags.join(',').trim();\n }\n }\n } else if (\n this._resHeaders['last-modified'] &&\n !headers['if-modified-since']\n ) {\n headers['if-modified-since'] = this._resHeaders['last-modified'];\n }\n\n return headers;\n }\n\n /**\n * Creates new CachePolicy with information combined from the previews response,\n * and the new revalidation response.\n *\n * Returns {policy, modified} where modified is a boolean indicating\n * whether the response body has been modified, and old cached body can't be used.\n *\n * @return {Object} {policy: CachePolicy, modified: Boolean}\n */\n revalidatedPolicy(request, response) {\n this._assertRequestHasHeaders(request);\n if(this._useStaleIfError() && isErrorResponse(response)) { // I consider the revalidation request unsuccessful\n return {\n modified: false,\n matches: false,\n policy: this,\n };\n }\n if (!response || !response.headers) {\n throw Error('Response headers missing');\n }\n\n // These aren't going to be supported exactly, since one CachePolicy object\n // doesn't know about all the other cached objects.\n let matches = false;\n if (response.status !== undefined && response.status != 304) {\n matches = false;\n } else if (\n response.headers.etag &&\n !/^\\s*W\\//.test(response.headers.etag)\n ) {\n // \"All of the stored responses with the same strong validator are selected.\n // If none of the stored responses contain the same strong validator,\n // then the cache MUST NOT use the new response to update any stored responses.\"\n matches =\n this._resHeaders.etag &&\n this._resHeaders.etag.replace(/^\\s*W\\//, '') ===\n response.headers.etag;\n } else if (this._resHeaders.etag && response.headers.etag) {\n // \"If the new response contains a weak validator and that validator corresponds\n // to one of the cache's stored responses,\n // then the most recent of those matching stored responses is selected for update.\"\n matches =\n this._resHeaders.etag.replace(/^\\s*W\\//, '') ===\n response.headers.etag.replace(/^\\s*W\\//, '');\n } else if (this._resHeaders['last-modified']) {\n matches =\n this._resHeaders['last-modified'] ===\n response.headers['last-modified'];\n } else {\n // If the new response does not include any form of validator (such as in the case where\n // a client generates an If-Modified-Since request from a source other than the Last-Modified\n // response header field), and there is only one stored response, and that stored response also\n // lacks a validator, then that stored response is selected for update.\n if (\n !this._resHeaders.etag 
&&\n !this._resHeaders['last-modified'] &&\n !response.headers.etag &&\n !response.headers['last-modified']\n ) {\n matches = true;\n }\n }\n\n if (!matches) {\n return {\n policy: new this.constructor(request, response),\n // Client receiving 304 without body, even if it's invalid/mismatched has no option\n // but to reuse a cached body. We don't have a good way to tell clients to do\n // error recovery in such case.\n modified: response.status != 304,\n matches: false,\n };\n }\n\n // use other header fields provided in the 304 (Not Modified) response to replace all instances\n // of the corresponding header fields in the stored response.\n const headers = {};\n for (const k in this._resHeaders) {\n headers[k] =\n k in response.headers && !excludedFromRevalidationUpdate[k]\n ? response.headers[k]\n : this._resHeaders[k];\n }\n\n const newResponse = Object.assign({}, response, {\n status: this._status,\n method: this._method,\n headers,\n });\n return {\n policy: new this.constructor(request, newResponse, {\n shared: this._isShared,\n cacheHeuristic: this._cacheHeuristic,\n immutableMinTimeToLive: this._immutableMinTtl,\n }),\n modified: false,\n matches: true,\n };\n }\n};\n","'use strict';\nconst EventEmitter = require('events');\nconst tls = require('tls');\nconst http2 = require('http2');\nconst QuickLRU = require('quick-lru');\n\nconst kCurrentStreamsCount = Symbol('currentStreamsCount');\nconst kRequest = Symbol('request');\nconst kOriginSet = Symbol('cachedOriginSet');\nconst kGracefullyClosing = Symbol('gracefullyClosing');\n\nconst nameKeys = [\n\t// `http2.connect()` options\n\t'maxDeflateDynamicTableSize',\n\t'maxSessionMemory',\n\t'maxHeaderListPairs',\n\t'maxOutstandingPings',\n\t'maxReservedRemoteStreams',\n\t'maxSendHeaderBlockLength',\n\t'paddingStrategy',\n\n\t// `tls.connect()` options\n\t'localAddress',\n\t'path',\n\t'rejectUnauthorized',\n\t'minDHSize',\n\n\t// `tls.createSecureContext()` options\n\t'ca',\n\t'cert',\n\t'clientCertEngine',\n\t'ciphers',\n\t'key',\n\t'pfx',\n\t'servername',\n\t'minVersion',\n\t'maxVersion',\n\t'secureProtocol',\n\t'crl',\n\t'honorCipherOrder',\n\t'ecdhCurve',\n\t'dhparam',\n\t'secureOptions',\n\t'sessionIdContext'\n];\n\nconst getSortedIndex = (array, value, compare) => {\n\tlet low = 0;\n\tlet high = array.length;\n\n\twhile (low < high) {\n\t\tconst mid = (low + high) >>> 1;\n\n\t\t/* istanbul ignore next */\n\t\tif (compare(array[mid], value)) {\n\t\t\t// This never gets called because we use descending sort. 
Better to have this anyway.\n\t\t\tlow = mid + 1;\n\t\t} else {\n\t\t\thigh = mid;\n\t\t}\n\t}\n\n\treturn low;\n};\n\nconst compareSessions = (a, b) => {\n\treturn a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams;\n};\n\n// See https://tools.ietf.org/html/rfc8336\nconst closeCoveredSessions = (where, session) => {\n\t// Clients SHOULD NOT emit new requests on any connection whose Origin\n\t// Set is a proper subset of another connection's Origin Set, and they\n\t// SHOULD close it once all outstanding requests are satisfied.\n\tfor (const coveredSession of where) {\n\t\tif (\n\t\t\t// The set is a proper subset when its length is less than the other set.\n\t\t\tcoveredSession[kOriginSet].length < session[kOriginSet].length &&\n\n\t\t\t// And the other set includes all elements of the subset.\n\t\t\tcoveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&\n\n\t\t\t// Makes sure that the session can handle all requests from the covered session.\n\t\t\tcoveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams\n\t\t) {\n\t\t\t// This allows pending requests to finish and prevents making new requests.\n\t\t\tgracefullyClose(coveredSession);\n\t\t}\n\t}\n};\n\n// This is basically inverted `closeCoveredSessions(...)`.\nconst closeSessionIfCovered = (where, coveredSession) => {\n\tfor (const session of where) {\n\t\tif (\n\t\t\tcoveredSession[kOriginSet].length < session[kOriginSet].length &&\n\t\t\tcoveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin)) &&\n\t\t\tcoveredSession[kCurrentStreamsCount] + session[kCurrentStreamsCount] <= session.remoteSettings.maxConcurrentStreams\n\t\t) {\n\t\t\tgracefullyClose(coveredSession);\n\t\t}\n\t}\n};\n\nconst getSessions = ({agent, isFree}) => {\n\tconst result = {};\n\n\t// eslint-disable-next-line guard-for-in\n\tfor (const normalizedOptions in agent.sessions) {\n\t\tconst sessions = agent.sessions[normalizedOptions];\n\n\t\tconst filtered = sessions.filter(session => {\n\t\t\tconst result = session[Agent.kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;\n\n\t\t\treturn isFree ? result : !result;\n\t\t});\n\n\t\tif (filtered.length !== 0) {\n\t\t\tresult[normalizedOptions] = filtered;\n\t\t}\n\t}\n\n\treturn result;\n};\n\nconst gracefullyClose = session => {\n\tsession[kGracefullyClosing] = true;\n\n\tif (session[kCurrentStreamsCount] === 0) {\n\t\tsession.close();\n\t}\n};\n\nclass Agent extends EventEmitter {\n\tconstructor({timeout = 60000, maxSessions = Infinity, maxFreeSessions = 10, maxCachedTlsSessions = 100} = {}) {\n\t\tsuper();\n\n\t\t// A session is considered busy when its current streams count\n\t\t// is equal to or greater than the `maxConcurrentStreams` value.\n\n\t\t// A session is considered free when its current streams count\n\t\t// is less than the `maxConcurrentStreams` value.\n\n\t\t// SESSIONS[NORMALIZED_OPTIONS] = [];\n\t\tthis.sessions = {};\n\n\t\t// The queue for creating new sessions. It looks like this:\n\t\t// QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION\n\t\t//\n\t\t// The entry function has `listeners`, `completed` and `destroyed` properties.\n\t\t// `listeners` is an array of objects containing `resolve` and `reject` functions.\n\t\t// `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed.\n\t\t// `destroyed` is a boolean. 
If it's set to true, the session will be destroyed if hasn't connected yet.\n\t\tthis.queue = {};\n\n\t\t// Each session will use this timeout value.\n\t\tthis.timeout = timeout;\n\n\t\t// Max sessions in total\n\t\tthis.maxSessions = maxSessions;\n\n\t\t// Max free sessions in total\n\t\t// TODO: decreasing `maxFreeSessions` should close some sessions\n\t\tthis.maxFreeSessions = maxFreeSessions;\n\n\t\tthis._freeSessionsCount = 0;\n\t\tthis._sessionsCount = 0;\n\n\t\t// We don't support push streams by default.\n\t\tthis.settings = {\n\t\t\tenablePush: false\n\t\t};\n\n\t\t// Reusing TLS sessions increases performance.\n\t\tthis.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions});\n\t}\n\n\tstatic normalizeOrigin(url, servername) {\n\t\tif (typeof url === 'string') {\n\t\t\turl = new URL(url);\n\t\t}\n\n\t\tif (servername && url.hostname !== servername) {\n\t\t\turl.hostname = servername;\n\t\t}\n\n\t\treturn url.origin;\n\t}\n\n\tnormalizeOptions(options) {\n\t\tlet normalized = '';\n\n\t\tif (options) {\n\t\t\tfor (const key of nameKeys) {\n\t\t\t\tif (options[key]) {\n\t\t\t\t\tnormalized += `:${options[key]}`;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn normalized;\n\t}\n\n\t_tryToCreateNewSession(normalizedOptions, normalizedOrigin) {\n\t\tif (!(normalizedOptions in this.queue) || !(normalizedOrigin in this.queue[normalizedOptions])) {\n\t\t\treturn;\n\t\t}\n\n\t\tconst item = this.queue[normalizedOptions][normalizedOrigin];\n\n\t\t// The entry function can be run only once.\n\t\t// BUG: The session may be never created when:\n\t\t// - the first condition is false AND\n\t\t// - this function is never called with the same arguments in the future.\n\t\tif (this._sessionsCount < this.maxSessions && !item.completed) {\n\t\t\titem.completed = true;\n\n\t\t\titem();\n\t\t}\n\t}\n\n\tgetSession(origin, options, listeners) {\n\t\treturn new Promise((resolve, reject) => {\n\t\t\tif (Array.isArray(listeners)) {\n\t\t\t\tlisteners = [...listeners];\n\n\t\t\t\t// Resolve the current promise ASAP, we're just moving the listeners.\n\t\t\t\t// They will be executed at a different time.\n\t\t\t\tresolve();\n\t\t\t} else {\n\t\t\t\tlisteners = [{resolve, reject}];\n\t\t\t}\n\n\t\t\tconst normalizedOptions = this.normalizeOptions(options);\n\t\t\tconst normalizedOrigin = Agent.normalizeOrigin(origin, options && options.servername);\n\n\t\t\tif (normalizedOrigin === undefined) {\n\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\treject(new TypeError('The `origin` argument needs to be a string or an URL object'));\n\t\t\t\t}\n\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (normalizedOptions in this.sessions) {\n\t\t\t\tconst sessions = this.sessions[normalizedOptions];\n\n\t\t\t\tlet maxConcurrentStreams = -1;\n\t\t\t\tlet currentStreamsCount = -1;\n\t\t\t\tlet optimalSession;\n\n\t\t\t\t// We could just do this.sessions[normalizedOptions].find(...) 
but that isn't optimal.\n\t\t\t\t// Additionally, we are looking for session which has biggest current pending streams count.\n\t\t\t\tfor (const session of sessions) {\n\t\t\t\t\tconst sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams;\n\n\t\t\t\t\tif (sessionMaxConcurrentStreams < maxConcurrentStreams) {\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (session[kOriginSet].includes(normalizedOrigin)) {\n\t\t\t\t\t\tconst sessionCurrentStreamsCount = session[kCurrentStreamsCount];\n\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\tsessionCurrentStreamsCount >= sessionMaxConcurrentStreams ||\n\t\t\t\t\t\t\tsession[kGracefullyClosing] ||\n\t\t\t\t\t\t\t// Unfortunately the `close` event isn't called immediately,\n\t\t\t\t\t\t\t// so `session.destroyed` is `true`, but `session.closed` is `false`.\n\t\t\t\t\t\t\tsession.destroyed\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// We only need set this once.\n\t\t\t\t\t\tif (!optimalSession) {\n\t\t\t\t\t\t\tmaxConcurrentStreams = sessionMaxConcurrentStreams;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// We're looking for the session which has biggest current pending stream count,\n\t\t\t\t\t\t// in order to minimalize the amount of active sessions.\n\t\t\t\t\t\tif (sessionCurrentStreamsCount > currentStreamsCount) {\n\t\t\t\t\t\t\toptimalSession = session;\n\t\t\t\t\t\t\tcurrentStreamsCount = sessionCurrentStreamsCount;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif (optimalSession) {\n\t\t\t\t\t/* istanbul ignore next: safety check */\n\t\t\t\t\tif (listeners.length !== 1) {\n\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\tconst error = new Error(\n\t\t\t\t\t\t\t\t`Expected the length of listeners to be 1, got ${listeners.length}.\\n` +\n\t\t\t\t\t\t\t\t'Please report this to https://github.com/szmarczak/http2-wrapper/'\n\t\t\t\t\t\t\t);\n\n\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\n\t\t\t\t\tlisteners[0].resolve(optimalSession);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (normalizedOptions in this.queue) {\n\t\t\t\tif (normalizedOrigin in this.queue[normalizedOptions]) {\n\t\t\t\t\t// There's already an item in the queue, just attach ourselves to it.\n\t\t\t\t\tthis.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners);\n\n\t\t\t\t\t// This shouldn't be executed here.\n\t\t\t\t\t// See the comment inside _tryToCreateNewSession.\n\t\t\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis.queue[normalizedOptions] = {};\n\t\t\t}\n\n\t\t\t// The entry must be removed from the queue IMMEDIATELY when:\n\t\t\t// 1. the session connects successfully,\n\t\t\t// 2. an error occurs.\n\t\t\tconst removeFromQueue = () => {\n\t\t\t\t// Our entry can be replaced. 
We cannot remove the new one.\n\t\t\t\tif (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) {\n\t\t\t\t\tdelete this.queue[normalizedOptions][normalizedOrigin];\n\n\t\t\t\t\tif (Object.keys(this.queue[normalizedOptions]).length === 0) {\n\t\t\t\t\t\tdelete this.queue[normalizedOptions];\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\t// The main logic is here\n\t\t\tconst entry = () => {\n\t\t\t\tconst name = `${normalizedOrigin}:${normalizedOptions}`;\n\t\t\t\tlet receivedSettings = false;\n\n\t\t\t\ttry {\n\t\t\t\t\tconst session = http2.connect(origin, {\n\t\t\t\t\t\tcreateConnection: this.createConnection,\n\t\t\t\t\t\tsettings: this.settings,\n\t\t\t\t\t\tsession: this.tlsSessionCache.get(name),\n\t\t\t\t\t\t...options\n\t\t\t\t\t});\n\t\t\t\t\tsession[kCurrentStreamsCount] = 0;\n\t\t\t\t\tsession[kGracefullyClosing] = false;\n\n\t\t\t\t\tconst isFree = () => session[kCurrentStreamsCount] < session.remoteSettings.maxConcurrentStreams;\n\t\t\t\t\tlet wasFree = true;\n\n\t\t\t\t\tsession.socket.once('session', tlsSession => {\n\t\t\t\t\t\tthis.tlsSessionCache.set(name, tlsSession);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('error', error => {\n\t\t\t\t\t\t// Listeners are empty when the session successfully connected.\n\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// The connection got broken, purge the cache.\n\t\t\t\t\t\tthis.tlsSessionCache.delete(name);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.setTimeout(this.timeout, () => {\n\t\t\t\t\t\t// Terminates all streams owned by this session.\n\t\t\t\t\t\t// TODO: Maybe the streams should have a \"Session timed out\" error?\n\t\t\t\t\t\tsession.destroy();\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('close', () => {\n\t\t\t\t\t\tif (receivedSettings) {\n\t\t\t\t\t\t\t// 1. If it wasn't free then no need to decrease because\n\t\t\t\t\t\t\t// it has been decreased already in session.request().\n\t\t\t\t\t\t\t// 2. 
`stream.once('close')` won't increment the count\n\t\t\t\t\t\t\t// because the session is already closed.\n\t\t\t\t\t\t\tif (wasFree) {\n\t\t\t\t\t\t\t\tthis._freeSessionsCount--;\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tthis._sessionsCount--;\n\n\t\t\t\t\t\t\t// This cannot be moved to the stream logic,\n\t\t\t\t\t\t\t// because there may be a session that hadn't made a single request.\n\t\t\t\t\t\t\tconst where = this.sessions[normalizedOptions];\n\t\t\t\t\t\t\twhere.splice(where.indexOf(session), 1);\n\n\t\t\t\t\t\t\tif (where.length === 0) {\n\t\t\t\t\t\t\t\tdelete this.sessions[normalizedOptions];\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t// Broken connection\n\t\t\t\t\t\t\tconst error = new Error('Session closed without receiving a SETTINGS frame');\n\t\t\t\t\t\t\terror.code = 'HTTP2WRAPPER_NOSETTINGS';\n\n\t\t\t\t\t\t\tfor (const {reject} of listeners) {\n\t\t\t\t\t\t\t\treject(error);\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tremoveFromQueue();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// There may be another session awaiting.\n\t\t\t\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t\t\t\t});\n\n\t\t\t\t\t// Iterates over the queue and processes listeners.\n\t\t\t\t\tconst processListeners = () => {\n\t\t\t\t\t\tif (!(normalizedOptions in this.queue) || !isFree()) {\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tfor (const origin of session[kOriginSet]) {\n\t\t\t\t\t\t\tif (origin in this.queue[normalizedOptions]) {\n\t\t\t\t\t\t\t\tconst {listeners} = this.queue[normalizedOptions][origin];\n\n\t\t\t\t\t\t\t\t// Prevents session overloading.\n\t\t\t\t\t\t\t\twhile (listeners.length !== 0 && isFree()) {\n\t\t\t\t\t\t\t\t\t// We assume `resolve(...)` calls `request(...)` *directly*,\n\t\t\t\t\t\t\t\t\t// otherwise the session will get overloaded.\n\t\t\t\t\t\t\t\t\tlisteners.shift().resolve(session);\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tconst where = this.queue[normalizedOptions];\n\t\t\t\t\t\t\t\tif (where[origin].listeners.length === 0) {\n\t\t\t\t\t\t\t\t\tdelete where[origin];\n\n\t\t\t\t\t\t\t\t\tif (Object.keys(where).length === 0) {\n\t\t\t\t\t\t\t\t\t\tdelete this.queue[normalizedOptions];\n\t\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t// We're no longer free, no point in continuing.\n\t\t\t\t\t\t\t\tif (!isFree()) {\n\t\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\n\t\t\t\t\t// The Origin Set cannot shrink. 
No need to check if it suddenly became covered by another one.\n\t\t\t\t\tsession.on('origin', () => {\n\t\t\t\t\t\tsession[kOriginSet] = session.originSet;\n\n\t\t\t\t\t\tif (!isFree()) {\n\t\t\t\t\t\t\t// The session is full.\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tprocessListeners();\n\n\t\t\t\t\t\t// Close covered sessions (if possible).\n\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t});\n\n\t\t\t\t\tsession.once('remoteSettings', () => {\n\t\t\t\t\t\t// Fix Node.js bug preventing the process from exiting\n\t\t\t\t\t\tsession.ref();\n\t\t\t\t\t\tsession.unref();\n\n\t\t\t\t\t\tthis._sessionsCount++;\n\n\t\t\t\t\t\t// The Agent could have been destroyed already.\n\t\t\t\t\t\tif (entry.destroyed) {\n\t\t\t\t\t\t\tconst error = new Error('Agent has been destroyed');\n\n\t\t\t\t\t\t\tfor (const listener of listeners) {\n\t\t\t\t\t\t\t\tlistener.reject(error);\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tsession.destroy();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tsession[kOriginSet] = session.originSet;\n\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tconst where = this.sessions;\n\n\t\t\t\t\t\t\tif (normalizedOptions in where) {\n\t\t\t\t\t\t\t\tconst sessions = where[normalizedOptions];\n\t\t\t\t\t\t\t\tsessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\twhere[normalizedOptions] = [session];\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tthis._freeSessionsCount += 1;\n\t\t\t\t\t\treceivedSettings = true;\n\n\t\t\t\t\t\tthis.emit('session', session);\n\n\t\t\t\t\t\tprocessListeners();\n\t\t\t\t\t\tremoveFromQueue();\n\n\t\t\t\t\t\t// TODO: Close last recently used (or least used?) session\n\t\t\t\t\t\tif (session[kCurrentStreamsCount] === 0 && this._freeSessionsCount > this.maxFreeSessions) {\n\t\t\t\t\t\t\tsession.close();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// Check if we haven't managed to execute all listeners.\n\t\t\t\t\t\tif (listeners.length !== 0) {\n\t\t\t\t\t\t\t// Request for a new session with predefined listeners.\n\t\t\t\t\t\t\tthis.getSession(normalizedOrigin, options, listeners);\n\t\t\t\t\t\t\tlisteners.length = 0;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// `session.remoteSettings.maxConcurrentStreams` might get increased\n\t\t\t\t\t\tsession.on('remoteSettings', () => {\n\t\t\t\t\t\t\tprocessListeners();\n\n\t\t\t\t\t\t\t// In case the Origin Set changes\n\t\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t\t});\n\t\t\t\t\t});\n\n\t\t\t\t\t// Shim `session.request()` in order to catch all streams\n\t\t\t\t\tsession[kRequest] = session.request;\n\t\t\t\t\tsession.request = (headers, streamOptions) => {\n\t\t\t\t\t\tif (session[kGracefullyClosing]) {\n\t\t\t\t\t\t\tthrow new Error('The session is gracefully closing. 
No new streams are allowed.');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tconst stream = session[kRequest](headers, streamOptions);\n\n\t\t\t\t\t\t// The process won't exit until the session is closed or all requests are gone.\n\t\t\t\t\t\tsession.ref();\n\n\t\t\t\t\t\t++session[kCurrentStreamsCount];\n\n\t\t\t\t\t\tif (session[kCurrentStreamsCount] === session.remoteSettings.maxConcurrentStreams) {\n\t\t\t\t\t\t\tthis._freeSessionsCount--;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tstream.once('close', () => {\n\t\t\t\t\t\t\twasFree = isFree();\n\n\t\t\t\t\t\t\t--session[kCurrentStreamsCount];\n\n\t\t\t\t\t\t\tif (!session.destroyed && !session.closed) {\n\t\t\t\t\t\t\t\tcloseSessionIfCovered(this.sessions[normalizedOptions], session);\n\n\t\t\t\t\t\t\t\tif (isFree() && !session.closed) {\n\t\t\t\t\t\t\t\t\tif (!wasFree) {\n\t\t\t\t\t\t\t\t\t\tthis._freeSessionsCount++;\n\n\t\t\t\t\t\t\t\t\t\twasFree = true;\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tconst isEmpty = session[kCurrentStreamsCount] === 0;\n\n\t\t\t\t\t\t\t\t\tif (isEmpty) {\n\t\t\t\t\t\t\t\t\t\tsession.unref();\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t\t\tisEmpty &&\n\t\t\t\t\t\t\t\t\t\t(\n\t\t\t\t\t\t\t\t\t\t\tthis._freeSessionsCount > this.maxFreeSessions ||\n\t\t\t\t\t\t\t\t\t\t\tsession[kGracefullyClosing]\n\t\t\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\t\t\tsession.close();\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tcloseCoveredSessions(this.sessions[normalizedOptions], session);\n\t\t\t\t\t\t\t\t\t\tprocessListeners();\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\n\t\t\t\t\t\treturn stream;\n\t\t\t\t\t};\n\t\t\t\t} catch (error) {\n\t\t\t\t\tfor (const listener of listeners) {\n\t\t\t\t\t\tlistener.reject(error);\n\t\t\t\t\t}\n\n\t\t\t\t\tremoveFromQueue();\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tentry.listeners = listeners;\n\t\t\tentry.completed = false;\n\t\t\tentry.destroyed = false;\n\n\t\t\tthis.queue[normalizedOptions][normalizedOrigin] = entry;\n\t\t\tthis._tryToCreateNewSession(normalizedOptions, normalizedOrigin);\n\t\t});\n\t}\n\n\trequest(origin, options, headers, streamOptions) {\n\t\treturn new Promise((resolve, reject) => {\n\t\t\tthis.getSession(origin, options, [{\n\t\t\t\treject,\n\t\t\t\tresolve: session => {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tresolve(session.request(headers, streamOptions));\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}]);\n\t\t});\n\t}\n\n\tcreateConnection(origin, options) {\n\t\treturn Agent.connect(origin, options);\n\t}\n\n\tstatic connect(origin, options) {\n\t\toptions.ALPNProtocols = ['h2'];\n\n\t\tconst port = origin.port || 443;\n\t\tconst host = origin.hostname || origin.host;\n\n\t\tif (typeof options.servername === 'undefined') {\n\t\t\toptions.servername = host;\n\t\t}\n\n\t\treturn tls.connect(port, host, options);\n\t}\n\n\tcloseFreeSessions() {\n\t\tfor (const sessions of Object.values(this.sessions)) {\n\t\t\tfor (const session of sessions) {\n\t\t\t\tif (session[kCurrentStreamsCount] === 0) {\n\t\t\t\t\tsession.close();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tdestroy(reason) {\n\t\tfor (const sessions of Object.values(this.sessions)) {\n\t\t\tfor (const session of sessions) {\n\t\t\t\tsession.destroy(reason);\n\t\t\t}\n\t\t}\n\n\t\tfor (const entriesOfAuthority of Object.values(this.queue)) {\n\t\t\tfor (const entry of Object.values(entriesOfAuthority)) {\n\t\t\t\tentry.destroyed = true;\n\t\t\t}\n\t\t}\n\n\t\t// New requests should NOT attach to destroyed sessions\n\t\tthis.queue = 
{};\n\t}\n\n\tget freeSessions() {\n\t\treturn getSessions({agent: this, isFree: true});\n\t}\n\n\tget busySessions() {\n\t\treturn getSessions({agent: this, isFree: false});\n\t}\n}\n\nAgent.kCurrentStreamsCount = kCurrentStreamsCount;\nAgent.kGracefullyClosing = kGracefullyClosing;\n\nmodule.exports = {\n\tAgent,\n\tglobalAgent: new Agent()\n};\n","'use strict';\nconst http = require('http');\nconst https = require('https');\nconst resolveALPN = require('resolve-alpn');\nconst QuickLRU = require('quick-lru');\nconst Http2ClientRequest = require('./client-request');\nconst calculateServerName = require('./utils/calculate-server-name');\nconst urlToOptions = require('./utils/url-to-options');\n\nconst cache = new QuickLRU({maxSize: 100});\nconst queue = new Map();\n\nconst installSocket = (agent, socket, options) => {\n\tsocket._httpMessage = {shouldKeepAlive: true};\n\n\tconst onFree = () => {\n\t\tagent.emit('free', socket, options);\n\t};\n\n\tsocket.on('free', onFree);\n\n\tconst onClose = () => {\n\t\tagent.removeSocket(socket, options);\n\t};\n\n\tsocket.on('close', onClose);\n\n\tconst onRemove = () => {\n\t\tagent.removeSocket(socket, options);\n\t\tsocket.off('close', onClose);\n\t\tsocket.off('free', onFree);\n\t\tsocket.off('agentRemove', onRemove);\n\t};\n\n\tsocket.on('agentRemove', onRemove);\n\n\tagent.emit('free', socket, options);\n};\n\nconst resolveProtocol = async options => {\n\tconst name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`;\n\n\tif (!cache.has(name)) {\n\t\tif (queue.has(name)) {\n\t\t\tconst result = await queue.get(name);\n\t\t\treturn result.alpnProtocol;\n\t\t}\n\n\t\tconst {path, agent} = options;\n\t\toptions.path = options.socketPath;\n\n\t\tconst resultPromise = resolveALPN(options);\n\t\tqueue.set(name, resultPromise);\n\n\t\ttry {\n\t\t\tconst {socket, alpnProtocol} = await resultPromise;\n\t\t\tcache.set(name, alpnProtocol);\n\n\t\t\toptions.path = path;\n\n\t\t\tif (alpnProtocol === 'h2') {\n\t\t\t\t// https://github.com/nodejs/node/issues/33343\n\t\t\t\tsocket.destroy();\n\t\t\t} else {\n\t\t\t\tconst {globalAgent} = https;\n\t\t\t\tconst defaultCreateConnection = https.Agent.prototype.createConnection;\n\n\t\t\t\tif (agent) {\n\t\t\t\t\tif (agent.createConnection === defaultCreateConnection) {\n\t\t\t\t\t\tinstallSocket(agent, socket, options);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tsocket.destroy();\n\t\t\t\t\t}\n\t\t\t\t} else if (globalAgent.createConnection === defaultCreateConnection) {\n\t\t\t\t\tinstallSocket(globalAgent, socket, options);\n\t\t\t\t} else {\n\t\t\t\t\tsocket.destroy();\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tqueue.delete(name);\n\n\t\t\treturn alpnProtocol;\n\t\t} catch (error) {\n\t\t\tqueue.delete(name);\n\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\treturn cache.get(name);\n};\n\nmodule.exports = async (input, options, callback) => {\n\tif (typeof input === 'string' || input instanceof URL) {\n\t\tinput = urlToOptions(new URL(input));\n\t}\n\n\tif (typeof options === 'function') {\n\t\tcallback = options;\n\t\toptions = undefined;\n\t}\n\n\toptions = {\n\t\tALPNProtocols: ['h2', 'http/1.1'],\n\t\t...input,\n\t\t...options,\n\t\tresolveSocket: true\n\t};\n\n\tif (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) {\n\t\tthrow new Error('The `ALPNProtocols` option must be an Array with at least one entry');\n\t}\n\n\toptions.protocol = options.protocol || 'https:';\n\tconst isHttps = options.protocol === 'https:';\n\n\toptions.host = options.hostname || options.host || 
'localhost';\n\toptions.session = options.tlsSession;\n\toptions.servername = options.servername || calculateServerName(options);\n\toptions.port = options.port || (isHttps ? 443 : 80);\n\toptions._defaultAgent = isHttps ? https.globalAgent : http.globalAgent;\n\n\tconst agents = options.agent;\n\n\tif (agents) {\n\t\tif (agents.addRequest) {\n\t\t\tthrow new Error('The `options.agent` object can contain only `http`, `https` or `http2` properties');\n\t\t}\n\n\t\toptions.agent = agents[isHttps ? 'https' : 'http'];\n\t}\n\n\tif (isHttps) {\n\t\tconst protocol = await resolveProtocol(options);\n\n\t\tif (protocol === 'h2') {\n\t\t\tif (agents) {\n\t\t\t\toptions.agent = agents.http2;\n\t\t\t}\n\n\t\t\treturn new Http2ClientRequest(options, callback);\n\t\t}\n\t}\n\n\treturn http.request(options, callback);\n};\n\nmodule.exports.protocolCache = cache;\n","'use strict';\nconst http2 = require('http2');\nconst {Writable} = require('stream');\nconst {Agent, globalAgent} = require('./agent');\nconst IncomingMessage = require('./incoming-message');\nconst urlToOptions = require('./utils/url-to-options');\nconst proxyEvents = require('./utils/proxy-events');\nconst isRequestPseudoHeader = require('./utils/is-request-pseudo-header');\nconst {\n\tERR_INVALID_ARG_TYPE,\n\tERR_INVALID_PROTOCOL,\n\tERR_HTTP_HEADERS_SENT,\n\tERR_INVALID_HTTP_TOKEN,\n\tERR_HTTP_INVALID_HEADER_VALUE,\n\tERR_INVALID_CHAR\n} = require('./utils/errors');\n\nconst {\n\tHTTP2_HEADER_STATUS,\n\tHTTP2_HEADER_METHOD,\n\tHTTP2_HEADER_PATH,\n\tHTTP2_METHOD_CONNECT\n} = http2.constants;\n\nconst kHeaders = Symbol('headers');\nconst kOrigin = Symbol('origin');\nconst kSession = Symbol('session');\nconst kOptions = Symbol('options');\nconst kFlushedHeaders = Symbol('flushedHeaders');\nconst kJobs = Symbol('jobs');\n\nconst isValidHttpToken = /^[\\^`\\-\\w!#$%&*+.|~]+$/;\nconst isInvalidHeaderValue = /[^\\t\\u0020-\\u007E\\u0080-\\u00FF]/;\n\nclass ClientRequest extends Writable {\n\tconstructor(input, options, callback) {\n\t\tsuper({\n\t\t\tautoDestroy: false\n\t\t});\n\n\t\tconst hasInput = typeof input === 'string' || input instanceof URL;\n\t\tif (hasInput) {\n\t\t\tinput = urlToOptions(input instanceof URL ? input : new URL(input));\n\t\t}\n\n\t\tif (typeof options === 'function' || options === undefined) {\n\t\t\t// (options, callback)\n\t\t\tcallback = options;\n\t\t\toptions = hasInput ? 
input : {...input};\n\t\t} else {\n\t\t\t// (input, options, callback)\n\t\t\toptions = {...input, ...options};\n\t\t}\n\n\t\tif (options.h2session) {\n\t\t\tthis[kSession] = options.h2session;\n\t\t} else if (options.agent === false) {\n\t\t\tthis.agent = new Agent({maxFreeSessions: 0});\n\t\t} else if (typeof options.agent === 'undefined' || options.agent === null) {\n\t\t\tif (typeof options.createConnection === 'function') {\n\t\t\t\t// This is a workaround - we don't have to create the session on our own.\n\t\t\t\tthis.agent = new Agent({maxFreeSessions: 0});\n\t\t\t\tthis.agent.createConnection = options.createConnection;\n\t\t\t} else {\n\t\t\t\tthis.agent = globalAgent;\n\t\t\t}\n\t\t} else if (typeof options.agent.request === 'function') {\n\t\t\tthis.agent = options.agent;\n\t\t} else {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('options.agent', ['Agent-like Object', 'undefined', 'false'], options.agent);\n\t\t}\n\n\t\tif (options.protocol && options.protocol !== 'https:') {\n\t\t\tthrow new ERR_INVALID_PROTOCOL(options.protocol, 'https:');\n\t\t}\n\n\t\tconst port = options.port || options.defaultPort || (this.agent && this.agent.defaultPort) || 443;\n\t\tconst host = options.hostname || options.host || 'localhost';\n\n\t\t// Don't enforce the origin via options. It may be changed in an Agent.\n\t\tdelete options.hostname;\n\t\tdelete options.host;\n\t\tdelete options.port;\n\n\t\tconst {timeout} = options;\n\t\toptions.timeout = undefined;\n\n\t\tthis[kHeaders] = Object.create(null);\n\t\tthis[kJobs] = [];\n\n\t\tthis.socket = null;\n\t\tthis.connection = null;\n\n\t\tthis.method = options.method || 'GET';\n\t\tthis.path = options.path;\n\n\t\tthis.res = null;\n\t\tthis.aborted = false;\n\t\tthis.reusedSocket = false;\n\n\t\tif (options.headers) {\n\t\t\tfor (const [header, value] of Object.entries(options.headers)) {\n\t\t\t\tthis.setHeader(header, value);\n\t\t\t}\n\t\t}\n\n\t\tif (options.auth && !('authorization' in this[kHeaders])) {\n\t\t\tthis[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64');\n\t\t}\n\n\t\toptions.session = options.tlsSession;\n\t\toptions.path = options.socketPath;\n\n\t\tthis[kOptions] = options;\n\n\t\t// Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field.\n\t\tif (port === 443) {\n\t\t\tthis[kOrigin] = `https://${host}`;\n\n\t\t\tif (!(':authority' in this[kHeaders])) {\n\t\t\t\tthis[kHeaders][':authority'] = host;\n\t\t\t}\n\t\t} else {\n\t\t\tthis[kOrigin] = `https://${host}:${port}`;\n\n\t\t\tif (!(':authority' in this[kHeaders])) {\n\t\t\t\tthis[kHeaders][':authority'] = `${host}:${port}`;\n\t\t\t}\n\t\t}\n\n\t\tif (timeout) {\n\t\t\tthis.setTimeout(timeout);\n\t\t}\n\n\t\tif (callback) {\n\t\t\tthis.once('response', callback);\n\t\t}\n\n\t\tthis[kFlushedHeaders] = false;\n\t}\n\n\tget method() {\n\t\treturn this[kHeaders][HTTP2_HEADER_METHOD];\n\t}\n\n\tset method(value) {\n\t\tif (value) {\n\t\t\tthis[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase();\n\t\t}\n\t}\n\n\tget path() {\n\t\treturn this[kHeaders][HTTP2_HEADER_PATH];\n\t}\n\n\tset path(value) {\n\t\tif (value) {\n\t\t\tthis[kHeaders][HTTP2_HEADER_PATH] = value;\n\t\t}\n\t}\n\n\tget _mustNotHaveABody() {\n\t\treturn this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE';\n\t}\n\n\t_write(chunk, encoding, callback) {\n\t\t// https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156\n\t\tif 
(this._mustNotHaveABody) {\n\t\t\tcallback(new Error('The GET, HEAD and DELETE methods must NOT have a body'));\n\t\t\t/* istanbul ignore next: Node.js 12 throws directly */\n\t\t\treturn;\n\t\t}\n\n\t\tthis.flushHeaders();\n\n\t\tconst callWrite = () => this._request.write(chunk, encoding, callback);\n\t\tif (this._request) {\n\t\t\tcallWrite();\n\t\t} else {\n\t\t\tthis[kJobs].push(callWrite);\n\t\t}\n\t}\n\n\t_final(callback) {\n\t\tif (this.destroyed) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis.flushHeaders();\n\n\t\tconst callEnd = () => {\n\t\t\t// For GET, HEAD and DELETE\n\t\t\tif (this._mustNotHaveABody) {\n\t\t\t\tcallback();\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tthis._request.end(callback);\n\t\t};\n\n\t\tif (this._request) {\n\t\t\tcallEnd();\n\t\t} else {\n\t\t\tthis[kJobs].push(callEnd);\n\t\t}\n\t}\n\n\tabort() {\n\t\tif (this.res && this.res.complete) {\n\t\t\treturn;\n\t\t}\n\n\t\tif (!this.aborted) {\n\t\t\tprocess.nextTick(() => this.emit('abort'));\n\t\t}\n\n\t\tthis.aborted = true;\n\n\t\tthis.destroy();\n\t}\n\n\t_destroy(error, callback) {\n\t\tif (this.res) {\n\t\t\tthis.res._dump();\n\t\t}\n\n\t\tif (this._request) {\n\t\t\tthis._request.destroy();\n\t\t}\n\n\t\tcallback(error);\n\t}\n\n\tasync flushHeaders() {\n\t\tif (this[kFlushedHeaders] || this.destroyed) {\n\t\t\treturn;\n\t\t}\n\n\t\tthis[kFlushedHeaders] = true;\n\n\t\tconst isConnectMethod = this.method === HTTP2_METHOD_CONNECT;\n\n\t\t// The real magic is here\n\t\tconst onStream = stream => {\n\t\t\tthis._request = stream;\n\n\t\t\tif (this.destroyed) {\n\t\t\t\tstream.destroy();\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Forwards `timeout`, `continue`, `close` and `error` events to this instance.\n\t\t\tif (!isConnectMethod) {\n\t\t\t\tproxyEvents(stream, this, ['timeout', 'continue', 'close', 'error']);\n\t\t\t}\n\n\t\t\t// Wait for the `finish` event. 
We don't want to emit the `response` event\n\t\t\t// before `request.end()` is called.\n\t\t\tconst waitForEnd = fn => {\n\t\t\t\treturn (...args) => {\n\t\t\t\t\tif (!this.writable && !this.destroyed) {\n\t\t\t\t\t\tfn(...args);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthis.once('finish', () => {\n\t\t\t\t\t\t\tfn(...args);\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t};\n\t\t\t};\n\n\t\t\t// This event tells we are ready to listen for the data.\n\t\t\tstream.once('response', waitForEnd((headers, flags, rawHeaders) => {\n\t\t\t\t// If we were to emit raw request stream, it would be as fast as the native approach.\n\t\t\t\t// Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested it).\n\t\t\t\tconst response = new IncomingMessage(this.socket, stream.readableHighWaterMark);\n\t\t\t\tthis.res = response;\n\n\t\t\t\tresponse.req = this;\n\t\t\t\tresponse.statusCode = headers[HTTP2_HEADER_STATUS];\n\t\t\t\tresponse.headers = headers;\n\t\t\t\tresponse.rawHeaders = rawHeaders;\n\n\t\t\t\tresponse.once('end', () => {\n\t\t\t\t\tif (this.aborted) {\n\t\t\t\t\t\tresponse.aborted = true;\n\t\t\t\t\t\tresponse.emit('aborted');\n\t\t\t\t\t} else {\n\t\t\t\t\t\tresponse.complete = true;\n\n\t\t\t\t\t\t// Has no effect, just be consistent with the Node.js behavior\n\t\t\t\t\t\tresponse.socket = null;\n\t\t\t\t\t\tresponse.connection = null;\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\tif (isConnectMethod) {\n\t\t\t\t\tresponse.upgrade = true;\n\n\t\t\t\t\t// The HTTP1 API says the socket is detached here,\n\t\t\t\t\t// but we can't do that so we pass the original HTTP2 request.\n\t\t\t\t\tif (this.emit('connect', response, stream, Buffer.alloc(0))) {\n\t\t\t\t\t\tthis.emit('close');\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// No listeners attached, destroy the original request.\n\t\t\t\t\t\tstream.destroy();\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// Forwards data\n\t\t\t\t\tstream.on('data', chunk => {\n\t\t\t\t\t\tif (!response._dumped && !response.push(chunk)) {\n\t\t\t\t\t\t\tstream.pause();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\n\t\t\t\t\tstream.once('end', () => {\n\t\t\t\t\t\tresponse.push(null);\n\t\t\t\t\t});\n\n\t\t\t\t\tif (!this.emit('response', response)) {\n\t\t\t\t\t\t// No listeners attached, dump the response.\n\t\t\t\t\t\tresponse._dump();\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}));\n\n\t\t\t// Emits `information` event\n\t\t\tstream.once('headers', waitForEnd(\n\t\t\t\theaders => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]})\n\t\t\t));\n\n\t\t\tstream.once('trailers', waitForEnd((trailers, flags, rawTrailers) => {\n\t\t\t\tconst {res} = this;\n\n\t\t\t\t// Assigns trailers to the response object.\n\t\t\t\tres.trailers = trailers;\n\t\t\t\tres.rawTrailers = rawTrailers;\n\t\t\t}));\n\n\t\t\tconst {socket} = stream.session;\n\t\t\tthis.socket = socket;\n\t\t\tthis.connection = socket;\n\n\t\t\tfor (const job of this[kJobs]) {\n\t\t\t\tjob();\n\t\t\t}\n\n\t\t\tthis.emit('socket', this.socket);\n\t\t};\n\n\t\t// Makes a HTTP2 request\n\t\tif (this[kSession]) {\n\t\t\ttry {\n\t\t\t\tonStream(this[kSession].request(this[kHeaders]));\n\t\t\t} catch (error) {\n\t\t\t\tthis.emit('error', error);\n\t\t\t}\n\t\t} else {\n\t\t\tthis.reusedSocket = true;\n\n\t\t\ttry {\n\t\t\t\tonStream(await this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]));\n\t\t\t} catch (error) {\n\t\t\t\tthis.emit('error', error);\n\t\t\t}\n\t\t}\n\t}\n\n\tgetHeader(name) {\n\t\tif (typeof name !== 'string') {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('name', 'string', 
name);\n\t\t}\n\n\t\treturn this[kHeaders][name.toLowerCase()];\n\t}\n\n\tget headersSent() {\n\t\treturn this[kFlushedHeaders];\n\t}\n\n\tremoveHeader(name) {\n\t\tif (typeof name !== 'string') {\n\t\t\tthrow new ERR_INVALID_ARG_TYPE('name', 'string', name);\n\t\t}\n\n\t\tif (this.headersSent) {\n\t\t\tthrow new ERR_HTTP_HEADERS_SENT('remove');\n\t\t}\n\n\t\tdelete this[kHeaders][name.toLowerCase()];\n\t}\n\n\tsetHeader(name, value) {\n\t\tif (this.headersSent) {\n\t\t\tthrow new ERR_HTTP_HEADERS_SENT('set');\n\t\t}\n\n\t\tif (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) {\n\t\t\tthrow new ERR_INVALID_HTTP_TOKEN('Header name', name);\n\t\t}\n\n\t\tif (typeof value === 'undefined') {\n\t\t\tthrow new ERR_HTTP_INVALID_HEADER_VALUE(value, name);\n\t\t}\n\n\t\tif (isInvalidHeaderValue.test(value)) {\n\t\t\tthrow new ERR_INVALID_CHAR('header content', name);\n\t\t}\n\n\t\tthis[kHeaders][name.toLowerCase()] = value;\n\t}\n\n\tsetNoDelay() {\n\t\t// HTTP2 sockets cannot be malformed, do nothing.\n\t}\n\n\tsetSocketKeepAlive() {\n\t\t// HTTP2 sockets cannot be malformed, do nothing.\n\t}\n\n\tsetTimeout(ms, callback) {\n\t\tconst applyTimeout = () => this._request.setTimeout(ms, callback);\n\n\t\tif (this._request) {\n\t\t\tapplyTimeout();\n\t\t} else {\n\t\t\tthis[kJobs].push(applyTimeout);\n\t\t}\n\n\t\treturn this;\n\t}\n\n\tget maxHeadersCount() {\n\t\tif (!this.destroyed && this._request) {\n\t\t\treturn this._request.session.localSettings.maxHeaderListSize;\n\t\t}\n\n\t\treturn undefined;\n\t}\n\n\tset maxHeadersCount(_value) {\n\t\t// Updating HTTP2 settings would affect all requests, do nothing.\n\t}\n}\n\nmodule.exports = ClientRequest;\n","'use strict';\nconst {Readable} = require('stream');\n\nclass IncomingMessage extends Readable {\n\tconstructor(socket, highWaterMark) {\n\t\tsuper({\n\t\t\thighWaterMark,\n\t\t\tautoDestroy: false\n\t\t});\n\n\t\tthis.statusCode = null;\n\t\tthis.statusMessage = '';\n\t\tthis.httpVersion = '2.0';\n\t\tthis.httpVersionMajor = 2;\n\t\tthis.httpVersionMinor = 0;\n\t\tthis.headers = {};\n\t\tthis.trailers = {};\n\t\tthis.req = null;\n\n\t\tthis.aborted = false;\n\t\tthis.complete = false;\n\t\tthis.upgrade = null;\n\n\t\tthis.rawHeaders = [];\n\t\tthis.rawTrailers = [];\n\n\t\tthis.socket = socket;\n\t\tthis.connection = socket;\n\n\t\tthis._dumped = false;\n\t}\n\n\t_destroy(error) {\n\t\tthis.req._request.destroy(error);\n\t}\n\n\tsetTimeout(ms, callback) {\n\t\tthis.req.setTimeout(ms, callback);\n\t\treturn this;\n\t}\n\n\t_dump() {\n\t\tif (!this._dumped) {\n\t\t\tthis._dumped = true;\n\n\t\t\tthis.removeAllListeners('data');\n\t\t\tthis.resume();\n\t\t}\n\t}\n\n\t_read() {\n\t\tif (this.req) {\n\t\t\tthis.req._request.resume();\n\t\t}\n\t}\n}\n\nmodule.exports = IncomingMessage;\n","'use strict';\nconst http2 = require('http2');\nconst agent = require('./agent');\nconst ClientRequest = require('./client-request');\nconst IncomingMessage = require('./incoming-message');\nconst auto = require('./auto');\n\nconst request = (url, options, callback) => {\n\treturn new ClientRequest(url, options, callback);\n};\n\nconst get = (url, options, callback) => {\n\t// eslint-disable-next-line unicorn/prevent-abbreviations\n\tconst req = new ClientRequest(url, options, callback);\n\treq.end();\n\n\treturn req;\n};\n\nmodule.exports = {\n\t...http2,\n\tClientRequest,\n\tIncomingMessage,\n\t...agent,\n\trequest,\n\tget,\n\tauto\n};\n","'use strict';\nconst net = require('net');\n/* istanbul ignore file: 
https://github.com/nodejs/node/blob/v13.0.1/lib/_http_agent.js */\n\nmodule.exports = options => {\n\tlet servername = options.host;\n\tconst hostHeader = options.headers && options.headers.host;\n\n\tif (hostHeader) {\n\t\tif (hostHeader.startsWith('[')) {\n\t\t\tconst index = hostHeader.indexOf(']');\n\t\t\tif (index === -1) {\n\t\t\t\tservername = hostHeader;\n\t\t\t} else {\n\t\t\t\tservername = hostHeader.slice(1, -1);\n\t\t\t}\n\t\t} else {\n\t\t\tservername = hostHeader.split(':', 1)[0];\n\t\t}\n\t}\n\n\tif (net.isIP(servername)) {\n\t\treturn '';\n\t}\n\n\treturn servername;\n};\n","'use strict';\n/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */\n\nconst makeError = (Base, key, getMessage) => {\n\tmodule.exports[key] = class NodeError extends Base {\n\t\tconstructor(...args) {\n\t\t\tsuper(typeof getMessage === 'string' ? getMessage : getMessage(args));\n\t\t\tthis.name = `${super.name} [${key}]`;\n\t\t\tthis.code = key;\n\t\t}\n\t};\n};\n\nmakeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => {\n\tconst type = args[0].includes('.') ? 'property' : 'argument';\n\n\tlet valid = args[1];\n\tconst isManyTypes = Array.isArray(valid);\n\n\tif (isManyTypes) {\n\t\tvalid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`;\n\t}\n\n\treturn `The \"${args[0]}\" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_PROTOCOL', args => {\n\treturn `Protocol \"${args[0]}\" not supported. Expected \"${args[1]}\"`;\n});\n\nmakeError(Error, 'ERR_HTTP_HEADERS_SENT', args => {\n\treturn `Cannot ${args[0]} headers after they are sent to the client`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args => {\n\treturn `${args[0]} must be a valid HTTP token [${args[1]}]`;\n});\n\nmakeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args => {\n\treturn `Invalid value \"${args[0]} for header \"${args[1]}\"`;\n});\n\nmakeError(TypeError, 'ERR_INVALID_CHAR', args => {\n\treturn `Invalid character in ${args[0]} [${args[1]}]`;\n});\n","'use strict';\n\nmodule.exports = header => {\n\tswitch (header) {\n\t\tcase ':method':\n\t\tcase ':scheme':\n\t\tcase ':authority':\n\t\tcase ':path':\n\t\t\treturn true;\n\t\tdefault:\n\t\t\treturn false;\n\t}\n};\n","'use strict';\n\nmodule.exports = (from, to, events) => {\n\tfor (const event of events) {\n\t\tfrom.on(event, (...args) => to.emit(event, ...args));\n\t}\n};\n","'use strict';\n/* istanbul ignore file: https://github.com/nodejs/node/blob/a91293d4d9ab403046ab5eb022332e4e3d249bd3/lib/internal/url.js#L1257 */\n\nmodule.exports = url => {\n\tconst options = {\n\t\tprotocol: url.protocol,\n\t\thostname: typeof url.hostname === 'string' && url.hostname.startsWith('[') ? 
url.hostname.slice(1, -1) : url.hostname,\n\t\thost: url.host,\n\t\thash: url.hash,\n\t\tsearch: url.search,\n\t\tpathname: url.pathname,\n\t\thref: url.href,\n\t\tpath: `${url.pathname || ''}${url.search || ''}`\n\t};\n\n\tif (typeof url.port === 'string' && url.port.length !== 0) {\n\t\toptions.port = Number(url.port);\n\t}\n\n\tif (url.username || url.password) {\n\t\toptions.auth = `${url.username || ''}:${url.password || ''}`;\n\t}\n\n\treturn options;\n};\n","/*!\n * is-extglob \n *\n * Copyright (c) 2014-2016, Jon Schlinkert.\n * Licensed under the MIT License.\n */\n\nmodule.exports = function isExtglob(str) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n var match;\n while ((match = /(\\\\).|([@?!+*]\\(.*\\))/g.exec(str))) {\n if (match[2]) return true;\n str = str.slice(match.index + match[0].length);\n }\n\n return false;\n};\n","/*!\n * is-glob \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nvar isExtglob = require('is-extglob');\nvar chars = { '{': '}', '(': ')', '[': ']'};\nvar strictRegex = /\\\\(.)|(^!|\\*|[\\].+)]\\?|\\[[^\\\\\\]]+\\]|\\{[^\\\\}]+\\}|\\(\\?[:!=][^\\\\)]+\\)|\\([^|]+\\|[^\\\\)]+\\))/;\nvar relaxedRegex = /\\\\(.)|(^!|[*?{}()[\\]]|\\(\\?)/;\n\nmodule.exports = function isGlob(str, options) {\n if (typeof str !== 'string' || str === '') {\n return false;\n }\n\n if (isExtglob(str)) {\n return true;\n }\n\n var regex = strictRegex;\n var match;\n\n // optionally relax regex\n if (options && options.strict === false) {\n regex = relaxedRegex;\n }\n\n while ((match = regex.exec(str))) {\n if (match[2]) return true;\n var idx = match.index + match[0].length;\n\n // if an open bracket/brace/paren is escaped,\n // set the index to the next closing character\n var open = match[1];\n var close = open ? chars[open] : null;\n if (open && close) {\n var n = str.indexOf(close, idx);\n if (n !== -1) {\n idx = n + 1;\n }\n }\n\n str = str.slice(idx);\n }\n return false;\n};\n","//TODO: handle reviver/dehydrate function like normal\n//and handle indentation, like normal.\n//if anyone needs this... please send pull request.\n\nexports.stringify = function stringify (o) {\n if('undefined' == typeof o) return o\n\n if(o && Buffer.isBuffer(o))\n return JSON.stringify(':base64:' + o.toString('base64'))\n\n if(o && o.toJSON)\n o = o.toJSON()\n\n if(o && 'object' === typeof o) {\n var s = ''\n var array = Array.isArray(o)\n s = array ? '[' : '{'\n var first = true\n\n for(var k in o) {\n var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k])\n if(Object.hasOwnProperty.call(o, k) && !ignore) {\n if(!first)\n s += ','\n first = false\n if (array) {\n if(o[k] == undefined)\n s += 'null'\n else\n s += stringify(o[k])\n } else if (o[k] !== void(0)) {\n s += stringify(k) + ':' + stringify(o[k])\n }\n }\n }\n\n s += array ? ']' : '}'\n\n return s\n } else if ('string' === typeof o) {\n return JSON.stringify(/^:/.test(o) ? ':' + o : o)\n } else if ('undefined' === typeof o) {\n return 'null';\n } else\n return JSON.stringify(o)\n}\n\nexports.parse = function (s) {\n return JSON.parse(s, function (key, value) {\n if('string' === typeof value) {\n if(/^:base64:/.test(value))\n return Buffer.from(value.substring(8), 'base64')\n else\n return /^:/.test(value) ? 
value.substring(1) : value \n }\n return value\n })\n}\n",null,"'use strict';\nmodule.exports = object => {\n\tconst result = {};\n\n\tfor (const [key, value] of Object.entries(object)) {\n\t\tresult[key.toLowerCase()] = value;\n\t}\n\n\treturn result;\n};\n","'use strict'\n/*\n * merge2\n * https://github.com/teambition/merge2\n *\n * Copyright (c) 2014-2020 Teambition\n * Licensed under the MIT license.\n */\nconst Stream = require('stream')\nconst PassThrough = Stream.PassThrough\nconst slice = Array.prototype.slice\n\nmodule.exports = merge2\n\nfunction merge2 () {\n const streamsQueue = []\n const args = slice.call(arguments)\n let merging = false\n let options = args[args.length - 1]\n\n if (options && !Array.isArray(options) && options.pipe == null) {\n args.pop()\n } else {\n options = {}\n }\n\n const doEnd = options.end !== false\n const doPipeError = options.pipeError === true\n if (options.objectMode == null) {\n options.objectMode = true\n }\n if (options.highWaterMark == null) {\n options.highWaterMark = 64 * 1024\n }\n const mergedStream = PassThrough(options)\n\n function addStream () {\n for (let i = 0, len = arguments.length; i < len; i++) {\n streamsQueue.push(pauseStreams(arguments[i], options))\n }\n mergeStream()\n return this\n }\n\n function mergeStream () {\n if (merging) {\n return\n }\n merging = true\n\n let streams = streamsQueue.shift()\n if (!streams) {\n process.nextTick(endStream)\n return\n }\n if (!Array.isArray(streams)) {\n streams = [streams]\n }\n\n let pipesCount = streams.length + 1\n\n function next () {\n if (--pipesCount > 0) {\n return\n }\n merging = false\n mergeStream()\n }\n\n function pipe (stream) {\n function onend () {\n stream.removeListener('merge2UnpipeEnd', onend)\n stream.removeListener('end', onend)\n if (doPipeError) {\n stream.removeListener('error', onerror)\n }\n next()\n }\n function onerror (err) {\n mergedStream.emit('error', err)\n }\n // skip ended stream\n if (stream._readableState.endEmitted) {\n return next()\n }\n\n stream.on('merge2UnpipeEnd', onend)\n stream.on('end', onend)\n\n if (doPipeError) {\n stream.on('error', onerror)\n }\n\n stream.pipe(mergedStream, { end: false })\n // compatible for old stream\n stream.resume()\n }\n\n for (let i = 0; i < streams.length; i++) {\n pipe(streams[i])\n }\n\n next()\n }\n\n function endStream () {\n merging = false\n // emit 'queueDrain' when all streams merged.\n mergedStream.emit('queueDrain')\n if (doEnd) {\n mergedStream.end()\n }\n }\n\n mergedStream.setMaxListeners(0)\n mergedStream.add = addStream\n mergedStream.on('unpipe', function (stream) {\n stream.emit('merge2UnpipeEnd')\n })\n\n if (args.length) {\n addStream.apply(null, args)\n }\n return mergedStream\n}\n\n// check and pause streams for pipe.\nfunction pauseStreams (streams, options) {\n if (!Array.isArray(streams)) {\n // Backwards-compat with old-style streams\n if (!streams._readableState && streams.pipe) {\n streams = streams.pipe(PassThrough(options))\n }\n if (!streams._readableState || !streams.pause || !streams.pipe) {\n throw new Error('Only readable stream can be merged.')\n }\n streams.pause()\n } else {\n for (let i = 0, len = streams.length; i < len; i++) {\n streams[i] = pauseStreams(streams[i], options)\n }\n }\n return streams\n}\n","'use strict';\n\n// We define these manually to ensure they're always copied\n// even if they would move up the prototype chain\n// https://nodejs.org/api/http.html#http_class_http_incomingmessage\nconst knownProps = 
[\n\t'destroy',\n\t'setTimeout',\n\t'socket',\n\t'headers',\n\t'trailers',\n\t'rawHeaders',\n\t'statusCode',\n\t'httpVersion',\n\t'httpVersionMinor',\n\t'httpVersionMajor',\n\t'rawTrailers',\n\t'statusMessage'\n];\n\nmodule.exports = (fromStream, toStream) => {\n\tconst fromProps = new Set(Object.keys(fromStream).concat(knownProps));\n\n\tfor (const prop of fromProps) {\n\t\t// Don't overwrite existing properties\n\t\tif (prop in toStream) {\n\t\t\tcontinue;\n\t\t}\n\n\t\ttoStream[prop] = typeof fromStream[prop] === 'function' ? fromStream[prop].bind(fromStream) : fromStream[prop];\n\t}\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","'use strict';\n// TODO: Use the `URL` global when targeting Node.js 10\nconst URLParser = typeof URL === 'undefined' ? require('url').URL : URL;\n\n// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs\nconst DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';\nconst DATA_URL_DEFAULT_CHARSET = 'us-ascii';\n\nconst testParameter = (name, filters) => {\n\treturn filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);\n};\n\nconst normalizeDataURL = (urlString, {stripHash}) => {\n\tconst parts = urlString.match(/^data:(.*?),(.*?)(?:#(.*))?$/);\n\n\tif (!parts) {\n\t\tthrow new Error(`Invalid URL: ${urlString}`);\n\t}\n\n\tconst mediaType = parts[1].split(';');\n\tconst body = parts[2];\n\tconst hash = stripHash ? 
'' : parts[3];\n\n\tlet base64 = false;\n\n\tif (mediaType[mediaType.length - 1] === 'base64') {\n\t\tmediaType.pop();\n\t\tbase64 = true;\n\t}\n\n\t// Lowercase MIME type\n\tconst mimeType = (mediaType.shift() || '').toLowerCase();\n\tconst attributes = mediaType\n\t\t.map(attribute => {\n\t\t\tlet [key, value = ''] = attribute.split('=').map(string => string.trim());\n\n\t\t\t// Lowercase `charset`\n\t\t\tif (key === 'charset') {\n\t\t\t\tvalue = value.toLowerCase();\n\n\t\t\t\tif (value === DATA_URL_DEFAULT_CHARSET) {\n\t\t\t\t\treturn '';\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn `${key}${value ? `=${value}` : ''}`;\n\t\t})\n\t\t.filter(Boolean);\n\n\tconst normalizedMediaType = [\n\t\t...attributes\n\t];\n\n\tif (base64) {\n\t\tnormalizedMediaType.push('base64');\n\t}\n\n\tif (normalizedMediaType.length !== 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {\n\t\tnormalizedMediaType.unshift(mimeType);\n\t}\n\n\treturn `data:${normalizedMediaType.join(';')},${base64 ? body.trim() : body}${hash ? `#${hash}` : ''}`;\n};\n\nconst normalizeUrl = (urlString, options) => {\n\toptions = {\n\t\tdefaultProtocol: 'http:',\n\t\tnormalizeProtocol: true,\n\t\tforceHttp: false,\n\t\tforceHttps: false,\n\t\tstripAuthentication: true,\n\t\tstripHash: false,\n\t\tstripWWW: true,\n\t\tremoveQueryParameters: [/^utm_\\w+/i],\n\t\tremoveTrailingSlash: true,\n\t\tremoveDirectoryIndex: false,\n\t\tsortQueryParameters: true,\n\t\t...options\n\t};\n\n\t// TODO: Remove this at some point in the future\n\tif (Reflect.has(options, 'normalizeHttps')) {\n\t\tthrow new Error('options.normalizeHttps is renamed to options.forceHttp');\n\t}\n\n\tif (Reflect.has(options, 'normalizeHttp')) {\n\t\tthrow new Error('options.normalizeHttp is renamed to options.forceHttps');\n\t}\n\n\tif (Reflect.has(options, 'stripFragment')) {\n\t\tthrow new Error('options.stripFragment is renamed to options.stripHash');\n\t}\n\n\turlString = urlString.trim();\n\n\t// Data URL\n\tif (/^data:/i.test(urlString)) {\n\t\treturn normalizeDataURL(urlString, options);\n\t}\n\n\tconst hasRelativeProtocol = urlString.startsWith('//');\n\tconst isRelativeUrl = !hasRelativeProtocol && /^\\.*\\//.test(urlString);\n\n\t// Prepend protocol\n\tif (!isRelativeUrl) {\n\t\turlString = urlString.replace(/^(?!(?:\\w+:)?\\/\\/)|^\\/\\//, options.defaultProtocol);\n\t}\n\n\tconst urlObj = new URLParser(urlString);\n\n\tif (options.forceHttp && options.forceHttps) {\n\t\tthrow new Error('The `forceHttp` and `forceHttps` options cannot be used together');\n\t}\n\n\tif (options.forceHttp && urlObj.protocol === 'https:') {\n\t\turlObj.protocol = 'http:';\n\t}\n\n\tif (options.forceHttps && urlObj.protocol === 'http:') {\n\t\turlObj.protocol = 'https:';\n\t}\n\n\t// Remove auth\n\tif (options.stripAuthentication) {\n\t\turlObj.username = '';\n\t\turlObj.password = '';\n\t}\n\n\t// Remove hash\n\tif (options.stripHash) {\n\t\turlObj.hash = '';\n\t}\n\n\t// Remove duplicate slashes if not preceded by a protocol\n\tif (urlObj.pathname) {\n\t\t// TODO: Use the following instead when targeting Node.js 10\n\t\t// `urlObj.pathname = urlObj.pathname.replace(/(? 
{\n\t\t\tif (/^(?!\\/)/g.test(p1)) {\n\t\t\t\treturn `${p1}/`;\n\t\t\t}\n\n\t\t\treturn '/';\n\t\t});\n\t}\n\n\t// Decode URI octets\n\tif (urlObj.pathname) {\n\t\turlObj.pathname = decodeURI(urlObj.pathname);\n\t}\n\n\t// Remove directory index\n\tif (options.removeDirectoryIndex === true) {\n\t\toptions.removeDirectoryIndex = [/^index\\.[a-z]+$/];\n\t}\n\n\tif (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {\n\t\tlet pathComponents = urlObj.pathname.split('/');\n\t\tconst lastComponent = pathComponents[pathComponents.length - 1];\n\n\t\tif (testParameter(lastComponent, options.removeDirectoryIndex)) {\n\t\t\tpathComponents = pathComponents.slice(0, pathComponents.length - 1);\n\t\t\turlObj.pathname = pathComponents.slice(1).join('/') + '/';\n\t\t}\n\t}\n\n\tif (urlObj.hostname) {\n\t\t// Remove trailing dot\n\t\turlObj.hostname = urlObj.hostname.replace(/\\.$/, '');\n\n\t\t// Remove `www.`\n\t\tif (options.stripWWW && /^www\\.([a-z\\-\\d]{2,63})\\.([a-z.]{2,5})$/.test(urlObj.hostname)) {\n\t\t\t// Each label should be max 63 at length (min: 2).\n\t\t\t// The extension should be max 5 at length (min: 2).\n\t\t\t// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names\n\t\t\turlObj.hostname = urlObj.hostname.replace(/^www\\./, '');\n\t\t}\n\t}\n\n\t// Remove query unwanted parameters\n\tif (Array.isArray(options.removeQueryParameters)) {\n\t\tfor (const key of [...urlObj.searchParams.keys()]) {\n\t\t\tif (testParameter(key, options.removeQueryParameters)) {\n\t\t\t\turlObj.searchParams.delete(key);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Sort query parameters\n\tif (options.sortQueryParameters) {\n\t\turlObj.searchParams.sort();\n\t}\n\n\tif (options.removeTrailingSlash) {\n\t\turlObj.pathname = urlObj.pathname.replace(/\\/$/, '');\n\t}\n\n\t// Take advantage of many of the Node `url` normalizations\n\turlString = urlObj.toString();\n\n\t// Remove ending `/`\n\tif ((options.removeTrailingSlash || urlObj.pathname === '/') && urlObj.hash === '') {\n\t\turlString = urlString.replace(/\\/$/, '');\n\t}\n\n\t// Restore relative protocol, if applicable\n\tif (hasRelativeProtocol && !options.normalizeProtocol) {\n\t\turlString = urlString.replace(/^http:\\/\\//, '//');\n\t}\n\n\t// Remove http/https\n\tif (options.stripProtocol) {\n\t\turlString = urlString.replace(/^(?:https?:)?\\/\\//, '');\n\t}\n\n\treturn urlString;\n};\n\nmodule.exports = normalizeUrl;\n// TODO: Remove this for the next major release\nmodule.exports.default = normalizeUrl;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\n\nclass CancelError extends Error {\n\tconstructor(reason) {\n\t\tsuper(reason 
|| 'Promise was canceled');\n\t\tthis.name = 'CancelError';\n\t}\n\n\tget isCanceled() {\n\t\treturn true;\n\t}\n}\n\nclass PCancelable {\n\tstatic fn(userFn) {\n\t\treturn (...arguments_) => {\n\t\t\treturn new PCancelable((resolve, reject, onCancel) => {\n\t\t\t\targuments_.push(onCancel);\n\t\t\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\t\t\tuserFn(...arguments_).then(resolve, reject);\n\t\t\t});\n\t\t};\n\t}\n\n\tconstructor(executor) {\n\t\tthis._cancelHandlers = [];\n\t\tthis._isPending = true;\n\t\tthis._isCanceled = false;\n\t\tthis._rejectOnCancel = true;\n\n\t\tthis._promise = new Promise((resolve, reject) => {\n\t\t\tthis._reject = reject;\n\n\t\t\tconst onResolve = value => {\n\t\t\t\tthis._isPending = false;\n\t\t\t\tresolve(value);\n\t\t\t};\n\n\t\t\tconst onReject = error => {\n\t\t\t\tthis._isPending = false;\n\t\t\t\treject(error);\n\t\t\t};\n\n\t\t\tconst onCancel = handler => {\n\t\t\t\tif (!this._isPending) {\n\t\t\t\t\tthrow new Error('The `onCancel` handler was attached after the promise settled.');\n\t\t\t\t}\n\n\t\t\t\tthis._cancelHandlers.push(handler);\n\t\t\t};\n\n\t\t\tObject.defineProperties(onCancel, {\n\t\t\t\tshouldReject: {\n\t\t\t\t\tget: () => this._rejectOnCancel,\n\t\t\t\t\tset: boolean => {\n\t\t\t\t\t\tthis._rejectOnCancel = boolean;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\n\t\t\treturn executor(onResolve, onReject, onCancel);\n\t\t});\n\t}\n\n\tthen(onFulfilled, onRejected) {\n\t\t// eslint-disable-next-line promise/prefer-await-to-then\n\t\treturn this._promise.then(onFulfilled, onRejected);\n\t}\n\n\tcatch(onRejected) {\n\t\treturn this._promise.catch(onRejected);\n\t}\n\n\tfinally(onFinally) {\n\t\treturn this._promise.finally(onFinally);\n\t}\n\n\tcancel(reason) {\n\t\tif (!this._isPending || this._isCanceled) {\n\t\t\treturn;\n\t\t}\n\n\t\tif (this._cancelHandlers.length > 0) {\n\t\t\ttry {\n\t\t\t\tfor (const handler of this._cancelHandlers) {\n\t\t\t\t\thandler();\n\t\t\t\t}\n\t\t\t} catch (error) {\n\t\t\t\tthis._reject(error);\n\t\t\t}\n\t\t}\n\n\t\tthis._isCanceled = true;\n\t\tif (this._rejectOnCancel) {\n\t\t\tthis._reject(new CancelError(reason));\n\t\t}\n\t}\n\n\tget isCanceled() {\n\t\treturn this._isCanceled;\n\t}\n}\n\nObject.setPrototypeOf(PCancelable.prototype, Promise.prototype);\n\nmodule.exports = PCancelable;\nmodule.exports.CancelError = CancelError;\n","'use strict';\n\nmodule.exports = require('./lib/picomatch');\n","'use strict';\n\nconst path = require('path');\nconst WIN_SLASH = '\\\\\\\\/';\nconst WIN_NO_SLASH = `[^${WIN_SLASH}]`;\n\n/**\n * Posix glob regex\n */\n\nconst DOT_LITERAL = '\\\\.';\nconst PLUS_LITERAL = '\\\\+';\nconst QMARK_LITERAL = '\\\\?';\nconst SLASH_LITERAL = '\\\\/';\nconst ONE_CHAR = '(?=.)';\nconst QMARK = '[^/]';\nconst END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;\nconst START_ANCHOR = `(?:^|${SLASH_LITERAL})`;\nconst DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;\nconst NO_DOT = `(?!${DOT_LITERAL})`;\nconst NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;\nconst NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;\nconst NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;\nconst QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;\nconst STAR = `${QMARK}*?`;\n\nconst POSIX_CHARS = {\n DOT_LITERAL,\n PLUS_LITERAL,\n QMARK_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n QMARK,\n END_ANCHOR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n};\n\n/**\n * Windows glob regex\n */\n\nconst WINDOWS_CHARS = {\n ...POSIX_CHARS,\n\n SLASH_LITERAL: `[${WIN_SLASH}]`,\n 
QMARK: WIN_NO_SLASH,\n STAR: `${WIN_NO_SLASH}*?`,\n DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,\n NO_DOT: `(?!${DOT_LITERAL})`,\n NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,\n NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,\n QMARK_NO_DOT: `[^.${WIN_SLASH}]`,\n START_ANCHOR: `(?:^|[${WIN_SLASH}])`,\n END_ANCHOR: `(?:[${WIN_SLASH}]|$)`\n};\n\n/**\n * POSIX Bracket Regex\n */\n\nconst POSIX_REGEX_SOURCE = {\n alnum: 'a-zA-Z0-9',\n alpha: 'a-zA-Z',\n ascii: '\\\\x00-\\\\x7F',\n blank: ' \\\\t',\n cntrl: '\\\\x00-\\\\x1F\\\\x7F',\n digit: '0-9',\n graph: '\\\\x21-\\\\x7E',\n lower: 'a-z',\n print: '\\\\x20-\\\\x7E ',\n punct: '\\\\-!\"#$%&\\'()\\\\*+,./:;<=>?@[\\\\]^_`{|}~',\n space: ' \\\\t\\\\r\\\\n\\\\v\\\\f',\n upper: 'A-Z',\n word: 'A-Za-z0-9_',\n xdigit: 'A-Fa-f0-9'\n};\n\nmodule.exports = {\n MAX_LENGTH: 1024 * 64,\n POSIX_REGEX_SOURCE,\n\n // regular expressions\n REGEX_BACKSLASH: /\\\\(?![*+?^${}(|)[\\]])/g,\n REGEX_NON_SPECIAL_CHARS: /^[^@![\\].,$*+?^{}()|\\\\/]+/,\n REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\\]]/,\n REGEX_SPECIAL_CHARS_BACKREF: /(\\\\?)((\\W)(\\3*))/g,\n REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\\]])/g,\n REGEX_REMOVE_BACKSLASH: /(?:\\[.*?[^\\\\]\\]|\\\\(?=.))/g,\n\n // Replace globs with equivalent patterns to reduce parsing time.\n REPLACEMENTS: {\n '***': '*',\n '**/**': '**',\n '**/**/**': '**'\n },\n\n // Digits\n CHAR_0: 48, /* 0 */\n CHAR_9: 57, /* 9 */\n\n // Alphabet chars.\n CHAR_UPPERCASE_A: 65, /* A */\n CHAR_LOWERCASE_A: 97, /* a */\n CHAR_UPPERCASE_Z: 90, /* Z */\n CHAR_LOWERCASE_Z: 122, /* z */\n\n CHAR_LEFT_PARENTHESES: 40, /* ( */\n CHAR_RIGHT_PARENTHESES: 41, /* ) */\n\n CHAR_ASTERISK: 42, /* * */\n\n // Non-alphabetic chars.\n CHAR_AMPERSAND: 38, /* & */\n CHAR_AT: 64, /* @ */\n CHAR_BACKWARD_SLASH: 92, /* \\ */\n CHAR_CARRIAGE_RETURN: 13, /* \\r */\n CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */\n CHAR_COLON: 58, /* : */\n CHAR_COMMA: 44, /* , */\n CHAR_DOT: 46, /* . */\n CHAR_DOUBLE_QUOTE: 34, /* \" */\n CHAR_EQUAL: 61, /* = */\n CHAR_EXCLAMATION_MARK: 33, /* ! */\n CHAR_FORM_FEED: 12, /* \\f */\n CHAR_FORWARD_SLASH: 47, /* / */\n CHAR_GRAVE_ACCENT: 96, /* ` */\n CHAR_HASH: 35, /* # */\n CHAR_HYPHEN_MINUS: 45, /* - */\n CHAR_LEFT_ANGLE_BRACKET: 60, /* < */\n CHAR_LEFT_CURLY_BRACE: 123, /* { */\n CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */\n CHAR_LINE_FEED: 10, /* \\n */\n CHAR_NO_BREAK_SPACE: 160, /* \\u00A0 */\n CHAR_PERCENT: 37, /* % */\n CHAR_PLUS: 43, /* + */\n CHAR_QUESTION_MARK: 63, /* ? */\n CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */\n CHAR_RIGHT_CURLY_BRACE: 125, /* } */\n CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */\n CHAR_SEMICOLON: 59, /* ; */\n CHAR_SINGLE_QUOTE: 39, /* ' */\n CHAR_SPACE: 32, /* */\n CHAR_TAB: 9, /* \\t */\n CHAR_UNDERSCORE: 95, /* _ */\n CHAR_VERTICAL_LINE: 124, /* | */\n CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \\uFEFF */\n\n SEP: path.sep,\n\n /**\n * Create EXTGLOB_CHARS\n */\n\n extglobChars(chars) {\n return {\n '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },\n '?': { type: 'qmark', open: '(?:', close: ')?' },\n '+': { type: 'plus', open: '(?:', close: ')+' },\n '*': { type: 'star', open: '(?:', close: ')*' },\n '@': { type: 'at', open: '(?:', close: ')' }\n };\n },\n\n /**\n * Create GLOB_CHARS\n */\n\n globChars(win32) {\n return win32 === true ? 
WINDOWS_CHARS : POSIX_CHARS;\n }\n};\n","'use strict';\n\nconst constants = require('./constants');\nconst utils = require('./utils');\n\n/**\n * Constants\n */\n\nconst {\n MAX_LENGTH,\n POSIX_REGEX_SOURCE,\n REGEX_NON_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_BACKREF,\n REPLACEMENTS\n} = constants;\n\n/**\n * Helpers\n */\n\nconst expandRange = (args, options) => {\n if (typeof options.expandRange === 'function') {\n return options.expandRange(...args, options);\n }\n\n args.sort();\n const value = `[${args.join('-')}]`;\n\n try {\n /* eslint-disable-next-line no-new */\n new RegExp(value);\n } catch (ex) {\n return args.map(v => utils.escapeRegex(v)).join('..');\n }\n\n return value;\n};\n\n/**\n * Create the message for a syntax error\n */\n\nconst syntaxError = (type, char) => {\n return `Missing ${type}: \"${char}\" - use \"\\\\\\\\${char}\" to match literal characters`;\n};\n\n/**\n * Parse the given input string.\n * @param {String} input\n * @param {Object} options\n * @return {Object}\n */\n\nconst parse = (input, options) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n input = REPLACEMENTS[input] || input;\n\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n\n let len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n const bos = { type: 'bos', value: '', output: opts.prepend || '' };\n const tokens = [bos];\n\n const capture = opts.capture ? '' : '?:';\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const PLATFORM_CHARS = constants.globChars(win32);\n const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);\n\n const {\n DOT_LITERAL,\n PLUS_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOT_SLASH,\n NO_DOTS_SLASH,\n QMARK,\n QMARK_NO_DOT,\n STAR,\n START_ANCHOR\n } = PLATFORM_CHARS;\n\n const globstar = (opts) => {\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const nodot = opts.dot ? '' : NO_DOT;\n const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;\n let star = opts.bash === true ? globstar(opts) : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n // minimatch options support\n if (typeof opts.noext === 'boolean') {\n opts.noextglob = opts.noext;\n }\n\n const state = {\n input,\n index: -1,\n start: 0,\n dot: opts.dot === true,\n consumed: '',\n output: '',\n prefix: '',\n backtrack: false,\n negated: false,\n brackets: 0,\n braces: 0,\n parens: 0,\n quotes: 0,\n globstar: false,\n tokens\n };\n\n input = utils.removePrefix(input, state);\n len = input.length;\n\n const extglobs = [];\n const braces = [];\n const stack = [];\n let prev = bos;\n let value;\n\n /**\n * Tokenizing helpers\n */\n\n const eos = () => state.index === len - 1;\n const peek = state.peek = (n = 1) => input[state.index + n];\n const advance = state.advance = () => input[++state.index];\n const remaining = () => input.slice(state.index + 1);\n const consume = (value = '', num = 0) => {\n state.consumed += value;\n state.index += num;\n };\n const append = token => {\n state.output += token.output != null ? token.output : token.value;\n consume(token.value);\n };\n\n const negate = () => {\n let count = 1;\n\n while (peek() === '!' 
&& (peek(2) !== '(' || peek(3) === '?')) {\n advance();\n state.start++;\n count++;\n }\n\n if (count % 2 === 0) {\n return false;\n }\n\n state.negated = true;\n state.start++;\n return true;\n };\n\n const increment = type => {\n state[type]++;\n stack.push(type);\n };\n\n const decrement = type => {\n state[type]--;\n stack.pop();\n };\n\n /**\n * Push tokens onto the tokens array. This helper speeds up\n * tokenizing by 1) helping us avoid backtracking as much as possible,\n * and 2) helping us avoid creating extra tokens when consecutive\n * characters are plain text. This improves performance and simplifies\n * lookbehinds.\n */\n\n const push = tok => {\n if (prev.type === 'globstar') {\n const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');\n const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));\n\n if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {\n state.output = state.output.slice(0, -prev.output.length);\n prev.type = 'star';\n prev.value = '*';\n prev.output = star;\n state.output += prev.output;\n }\n }\n\n if (extglobs.length && tok.type !== 'paren' && !EXTGLOB_CHARS[tok.value]) {\n extglobs[extglobs.length - 1].inner += tok.value;\n }\n\n if (tok.value || tok.output) append(tok);\n if (prev && prev.type === 'text' && tok.type === 'text') {\n prev.value += tok.value;\n prev.output = (prev.output || '') + tok.value;\n return;\n }\n\n tok.prev = prev;\n tokens.push(tok);\n prev = tok;\n };\n\n const extglobOpen = (type, value) => {\n const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };\n\n token.prev = prev;\n token.parens = state.parens;\n token.output = state.output;\n const output = (opts.capture ? '(' : '') + token.open;\n\n increment('parens');\n push({ type, value, output: state.output ? '' : ONE_CHAR });\n push({ type: 'paren', extglob: true, value: advance(), output });\n extglobs.push(token);\n };\n\n const extglobClose = token => {\n let output = token.close + (opts.capture ? ')' : '');\n\n if (token.type === 'negate') {\n let extglobStar = star;\n\n if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {\n extglobStar = globstar(opts);\n }\n\n if (extglobStar !== star || eos() || /^\\)+$/.test(remaining())) {\n output = token.close = `)$))${extglobStar}`;\n }\n\n if (token.prev.type === 'bos' && eos()) {\n state.negatedExtglob = true;\n }\n }\n\n push({ type: 'paren', extglob: true, value, output });\n decrement('parens');\n };\n\n /**\n * Fast paths\n */\n\n if (opts.fastpaths !== false && !/(^[*!]|[/()[\\]{}\"])/.test(input)) {\n let backslashes = false;\n\n let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {\n if (first === '\\\\') {\n backslashes = true;\n return m;\n }\n\n if (first === '?') {\n if (esc) {\n return esc + first + (rest ? QMARK.repeat(rest.length) : '');\n }\n if (index === 0) {\n return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');\n }\n return QMARK.repeat(chars.length);\n }\n\n if (first === '.') {\n return DOT_LITERAL.repeat(chars.length);\n }\n\n if (first === '*') {\n if (esc) {\n return esc + first + (rest ? star : '');\n }\n return star;\n }\n return esc ? m : `\\\\${m}`;\n });\n\n if (backslashes === true) {\n if (opts.unescape === true) {\n output = output.replace(/\\\\/g, '');\n } else {\n output = output.replace(/\\\\+/g, m => {\n return m.length % 2 === 0 ? '\\\\\\\\' : (m ? 
'\\\\' : '');\n });\n }\n }\n\n if (output === input && opts.contains === true) {\n state.output = input;\n return state;\n }\n\n state.output = utils.wrapOutput(output, state, options);\n return state;\n }\n\n /**\n * Tokenize input until we reach end-of-string\n */\n\n while (!eos()) {\n value = advance();\n\n if (value === '\\u0000') {\n continue;\n }\n\n /**\n * Escaped characters\n */\n\n if (value === '\\\\') {\n const next = peek();\n\n if (next === '/' && opts.bash !== true) {\n continue;\n }\n\n if (next === '.' || next === ';') {\n continue;\n }\n\n if (!next) {\n value += '\\\\';\n push({ type: 'text', value });\n continue;\n }\n\n // collapse slashes to reduce potential for exploits\n const match = /^\\\\+/.exec(remaining());\n let slashes = 0;\n\n if (match && match[0].length > 2) {\n slashes = match[0].length;\n state.index += slashes;\n if (slashes % 2 !== 0) {\n value += '\\\\';\n }\n }\n\n if (opts.unescape === true) {\n value = advance() || '';\n } else {\n value += advance() || '';\n }\n\n if (state.brackets === 0) {\n push({ type: 'text', value });\n continue;\n }\n }\n\n /**\n * If we're inside a regex character class, continue\n * until we reach the closing bracket.\n */\n\n if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {\n if (opts.posix !== false && value === ':') {\n const inner = prev.value.slice(1);\n if (inner.includes('[')) {\n prev.posix = true;\n\n if (inner.includes(':')) {\n const idx = prev.value.lastIndexOf('[');\n const pre = prev.value.slice(0, idx);\n const rest = prev.value.slice(idx + 2);\n const posix = POSIX_REGEX_SOURCE[rest];\n if (posix) {\n prev.value = pre + posix;\n state.backtrack = true;\n advance();\n\n if (!bos.output && tokens.indexOf(prev) === 1) {\n bos.output = ONE_CHAR;\n }\n continue;\n }\n }\n }\n }\n\n if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {\n value = `\\\\${value}`;\n }\n\n if (value === ']' && (prev.value === '[' || prev.value === '[^')) {\n value = `\\\\${value}`;\n }\n\n if (opts.posix === true && value === '!' && prev.value === '[') {\n value = '^';\n }\n\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * If we're inside a quoted string, continue\n * until we reach the closing double quote.\n */\n\n if (state.quotes === 1 && value !== '\"') {\n value = utils.escapeRegex(value);\n prev.value += value;\n append({ value });\n continue;\n }\n\n /**\n * Double quotes\n */\n\n if (value === '\"') {\n state.quotes = state.quotes === 1 ? 0 : 1;\n if (opts.keepQuotes === true) {\n push({ type: 'text', value });\n }\n continue;\n }\n\n /**\n * Parentheses\n */\n\n if (value === '(') {\n increment('parens');\n push({ type: 'paren', value });\n continue;\n }\n\n if (value === ')') {\n if (state.parens === 0 && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '('));\n }\n\n const extglob = extglobs[extglobs.length - 1];\n if (extglob && state.parens === extglob.parens + 1) {\n extglobClose(extglobs.pop());\n continue;\n }\n\n push({ type: 'paren', value, output: state.parens ? 
')' : '\\\\)' });\n decrement('parens');\n continue;\n }\n\n /**\n * Square brackets\n */\n\n if (value === '[') {\n if (opts.nobracket === true || !remaining().includes(']')) {\n if (opts.nobracket !== true && opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('closing', ']'));\n }\n\n value = `\\\\${value}`;\n } else {\n increment('brackets');\n }\n\n push({ type: 'bracket', value });\n continue;\n }\n\n if (value === ']') {\n if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n if (state.brackets === 0) {\n if (opts.strictBrackets === true) {\n throw new SyntaxError(syntaxError('opening', '['));\n }\n\n push({ type: 'text', value, output: `\\\\${value}` });\n continue;\n }\n\n decrement('brackets');\n\n const prevValue = prev.value.slice(1);\n if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {\n value = `/${value}`;\n }\n\n prev.value += value;\n append({ value });\n\n // when literal brackets are explicitly disabled\n // assume we should match with a regex character class\n if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {\n continue;\n }\n\n const escaped = utils.escapeRegex(prev.value);\n state.output = state.output.slice(0, -prev.value.length);\n\n // when literal brackets are explicitly enabled\n // assume we should escape the brackets to match literal characters\n if (opts.literalBrackets === true) {\n state.output += escaped;\n prev.value = escaped;\n continue;\n }\n\n // when the user specifies nothing, try to match both\n prev.value = `(${capture}${escaped}|${prev.value})`;\n state.output += prev.value;\n continue;\n }\n\n /**\n * Braces\n */\n\n if (value === '{' && opts.nobrace !== true) {\n increment('braces');\n\n const open = {\n type: 'brace',\n value,\n output: '(',\n outputIndex: state.output.length,\n tokensIndex: state.tokens.length\n };\n\n braces.push(open);\n push(open);\n continue;\n }\n\n if (value === '}') {\n const brace = braces[braces.length - 1];\n\n if (opts.nobrace === true || !brace) {\n push({ type: 'text', value, output: value });\n continue;\n }\n\n let output = ')';\n\n if (brace.dots === true) {\n const arr = tokens.slice();\n const range = [];\n\n for (let i = arr.length - 1; i >= 0; i--) {\n tokens.pop();\n if (arr[i].type === 'brace') {\n break;\n }\n if (arr[i].type !== 'dots') {\n range.unshift(arr[i].value);\n }\n }\n\n output = expandRange(range, opts);\n state.backtrack = true;\n }\n\n if (brace.comma !== true && brace.dots !== true) {\n const out = state.output.slice(0, brace.outputIndex);\n const toks = state.tokens.slice(brace.tokensIndex);\n brace.value = brace.output = '\\\\{';\n value = output = '\\\\}';\n state.output = out;\n for (const t of toks) {\n state.output += (t.output || t.value);\n }\n }\n\n push({ type: 'brace', value, output });\n decrement('braces');\n braces.pop();\n continue;\n }\n\n /**\n * Pipes\n */\n\n if (value === '|') {\n if (extglobs.length > 0) {\n extglobs[extglobs.length - 1].conditions++;\n }\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Commas\n */\n\n if (value === ',') {\n let output = value;\n\n const brace = braces[braces.length - 1];\n if (brace && stack[stack.length - 1] === 'braces') {\n brace.comma = true;\n output = '|';\n }\n\n push({ type: 'comma', value, output });\n continue;\n }\n\n /**\n * Slashes\n */\n\n if (value === '/') {\n // if the beginning of the glob is \"./\", advance the start\n // 
to the current index, and don't add the \"./\" characters\n // to the state. This greatly simplifies lookbehinds when\n // checking for BOS characters like \"!\" and \".\" (not \"./\")\n if (prev.type === 'dot' && state.index === state.start + 1) {\n state.start = state.index + 1;\n state.consumed = '';\n state.output = '';\n tokens.pop();\n prev = bos; // reset \"prev\" to the first token\n continue;\n }\n\n push({ type: 'slash', value, output: SLASH_LITERAL });\n continue;\n }\n\n /**\n * Dots\n */\n\n if (value === '.') {\n if (state.braces > 0 && prev.type === 'dot') {\n if (prev.value === '.') prev.output = DOT_LITERAL;\n const brace = braces[braces.length - 1];\n prev.type = 'dots';\n prev.output += value;\n prev.value += value;\n brace.dots = true;\n continue;\n }\n\n if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {\n push({ type: 'text', value, output: DOT_LITERAL });\n continue;\n }\n\n push({ type: 'dot', value, output: DOT_LITERAL });\n continue;\n }\n\n /**\n * Question marks\n */\n\n if (value === '?') {\n const isGroup = prev && prev.value === '(';\n if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('qmark', value);\n continue;\n }\n\n if (prev && prev.type === 'paren') {\n const next = peek();\n let output = value;\n\n if (next === '<' && !utils.supportsLookbehinds()) {\n throw new Error('Node.js v10 or higher is required for regex lookbehinds');\n }\n\n if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\\w+>)/.test(remaining()))) {\n output = `\\\\${value}`;\n }\n\n push({ type: 'text', value, output });\n continue;\n }\n\n if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {\n push({ type: 'qmark', value, output: QMARK_NO_DOT });\n continue;\n }\n\n push({ type: 'qmark', value, output: QMARK });\n continue;\n }\n\n /**\n * Exclamation\n */\n\n if (value === '!') {\n if (opts.noextglob !== true && peek() === '(') {\n if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) {\n extglobOpen('negate', value);\n continue;\n }\n }\n\n if (opts.nonegate !== true && state.index === 0) {\n negate();\n continue;\n }\n }\n\n /**\n * Plus\n */\n\n if (value === '+') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n extglobOpen('plus', value);\n continue;\n }\n\n if ((prev && prev.value === '(') || opts.regex === false) {\n push({ type: 'plus', value, output: PLUS_LITERAL });\n continue;\n }\n\n if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {\n push({ type: 'plus', value });\n continue;\n }\n\n push({ type: 'plus', value: PLUS_LITERAL });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value === '@') {\n if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {\n push({ type: 'at', extglob: true, value, output: '' });\n continue;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Plain text\n */\n\n if (value !== '*') {\n if (value === '$' || value === '^') {\n value = `\\\\${value}`;\n }\n\n const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());\n if (match) {\n value += match[0];\n state.index += match[0].length;\n }\n\n push({ type: 'text', value });\n continue;\n }\n\n /**\n * Stars\n */\n\n if (prev && (prev.type === 'globstar' || prev.star === true)) {\n prev.type = 'star';\n prev.star = true;\n prev.value += value;\n prev.output = star;\n state.backtrack = true;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n let rest = remaining();\n if (opts.noextglob !== true && /^\\([^?]/.test(rest)) {\n extglobOpen('star', value);\n continue;\n }\n\n if (prev.type === 'star') {\n if (opts.noglobstar === true) {\n consume(value);\n continue;\n }\n\n const prior = prev.prev;\n const before = prior.prev;\n const isStart = prior.type === 'slash' || prior.type === 'bos';\n const afterStar = before && (before.type === 'star' || before.type === 'globstar');\n\n if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');\n const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');\n if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {\n push({ type: 'star', value, output: '' });\n continue;\n }\n\n // strip consecutive `/**/`\n while (rest.slice(0, 3) === '/**') {\n const after = input[state.index + 4];\n if (after && after !== '/') {\n break;\n }\n rest = rest.slice(3);\n consume('/**', 3);\n }\n\n if (prior.type === 'bos' && eos()) {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = globstar(opts);\n state.output = prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');\n prev.value += value;\n state.globstar = true;\n state.output += prior.output + prev.output;\n consume(value);\n continue;\n }\n\n if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {\n const end = rest[1] !== void 0 ? 
'|$' : '';\n\n state.output = state.output.slice(0, -(prior.output + prev.output).length);\n prior.output = `(?:${prior.output}`;\n\n prev.type = 'globstar';\n prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;\n prev.value += value;\n\n state.output += prior.output + prev.output;\n state.globstar = true;\n\n consume(value + advance());\n\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n if (prior.type === 'bos' && rest[0] === '/') {\n prev.type = 'globstar';\n prev.value += value;\n prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;\n state.output = prev.output;\n state.globstar = true;\n consume(value + advance());\n push({ type: 'slash', value: '/', output: '' });\n continue;\n }\n\n // remove single star from output\n state.output = state.output.slice(0, -prev.output.length);\n\n // reset previous token to globstar\n prev.type = 'globstar';\n prev.output = globstar(opts);\n prev.value += value;\n\n // reset output with globstar\n state.output += prev.output;\n state.globstar = true;\n consume(value);\n continue;\n }\n\n const token = { type: 'star', value, output: star };\n\n if (opts.bash === true) {\n token.output = '.*?';\n if (prev.type === 'bos' || prev.type === 'slash') {\n token.output = nodot + token.output;\n }\n push(token);\n continue;\n }\n\n if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {\n token.output = value;\n push(token);\n continue;\n }\n\n if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {\n if (prev.type === 'dot') {\n state.output += NO_DOT_SLASH;\n prev.output += NO_DOT_SLASH;\n\n } else if (opts.dot === true) {\n state.output += NO_DOTS_SLASH;\n prev.output += NO_DOTS_SLASH;\n\n } else {\n state.output += nodot;\n prev.output += nodot;\n }\n\n if (peek() !== '*') {\n state.output += ONE_CHAR;\n prev.output += ONE_CHAR;\n }\n }\n\n push(token);\n }\n\n while (state.brackets > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));\n state.output = utils.escapeLast(state.output, '[');\n decrement('brackets');\n }\n\n while (state.parens > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));\n state.output = utils.escapeLast(state.output, '(');\n decrement('parens');\n }\n\n while (state.braces > 0) {\n if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));\n state.output = utils.escapeLast(state.output, '{');\n decrement('braces');\n }\n\n if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {\n push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });\n }\n\n // rebuild the output if we had to backtrack at any point\n if (state.backtrack === true) {\n state.output = '';\n\n for (const token of state.tokens) {\n state.output += token.output != null ? token.output : token.value;\n\n if (token.suffix) {\n state.output += token.suffix;\n }\n }\n }\n\n return state;\n};\n\n/**\n * Fast paths for creating regular expressions for common glob patterns.\n * This can significantly speed up processing and has very little downside\n * impact when none of the fast paths match.\n */\n\nparse.fastpaths = (input, options) => {\n const opts = { ...options };\n const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;\n const len = input.length;\n if (len > max) {\n throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);\n }\n\n input = REPLACEMENTS[input] || input;\n const win32 = utils.isWindows(options);\n\n // create constants based on platform, for windows or posix\n const {\n DOT_LITERAL,\n SLASH_LITERAL,\n ONE_CHAR,\n DOTS_SLASH,\n NO_DOT,\n NO_DOTS,\n NO_DOTS_SLASH,\n STAR,\n START_ANCHOR\n } = constants.globChars(win32);\n\n const nodot = opts.dot ? NO_DOTS : NO_DOT;\n const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;\n const capture = opts.capture ? '' : '?:';\n const state = { negated: false, prefix: '' };\n let star = opts.bash === true ? '.*?' : STAR;\n\n if (opts.capture) {\n star = `(${star})`;\n }\n\n const globstar = (opts) => {\n if (opts.noglobstar === true) return star;\n return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;\n };\n\n const create = str => {\n switch (str) {\n case '*':\n return `${nodot}${ONE_CHAR}${star}`;\n\n case '.*':\n return `${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*.*':\n return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '*/*':\n return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;\n\n case '**':\n return nodot + globstar(opts);\n\n case '**/*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;\n\n case '**/*.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n case '**/.*':\n return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;\n\n default: {\n const match = /^(.*?)\\.(\\w+)$/.exec(str);\n if (!match) return;\n\n const source = create(match[1]);\n if (!source) return;\n\n return source + DOT_LITERAL + match[2];\n }\n }\n };\n\n const output = utils.removePrefix(input, state);\n let source = create(output);\n\n if (source && opts.strictSlashes !== true) {\n source += `${SLASH_LITERAL}?`;\n }\n\n return source;\n};\n\nmodule.exports = parse;\n","'use strict';\n\nconst path = require('path');\nconst scan = require('./scan');\nconst parse = require('./parse');\nconst utils = require('./utils');\nconst constants = require('./constants');\nconst isObject = val => val && typeof val === 'object' && !Array.isArray(val);\n\n/**\n * Creates a matcher function from one or more glob patterns. The\n * returned function takes a string to match as its first argument,\n * and returns true if the string is a match. 
The returned matcher\n * function also takes a boolean as the second argument that, when true,\n * returns an object with additional information.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch(glob[, options]);\n *\n * const isMatch = picomatch('*.!(*a)');\n * console.log(isMatch('a.a')); //=> false\n * console.log(isMatch('a.b')); //=> true\n * ```\n * @name picomatch\n * @param {String|Array} `globs` One or more glob patterns.\n * @param {Object=} `options`\n * @return {Function=} Returns a matcher function.\n * @api public\n */\n\nconst picomatch = (glob, options, returnState = false) => {\n if (Array.isArray(glob)) {\n const fns = glob.map(input => picomatch(input, options, returnState));\n const arrayMatcher = str => {\n for (const isMatch of fns) {\n const state = isMatch(str);\n if (state) return state;\n }\n return false;\n };\n return arrayMatcher;\n }\n\n const isState = isObject(glob) && glob.tokens && glob.input;\n\n if (glob === '' || (typeof glob !== 'string' && !isState)) {\n throw new TypeError('Expected pattern to be a non-empty string');\n }\n\n const opts = options || {};\n const posix = utils.isWindows(options);\n const regex = isState\n ? picomatch.compileRe(glob, options)\n : picomatch.makeRe(glob, options, false, true);\n\n const state = regex.state;\n delete regex.state;\n\n let isIgnored = () => false;\n if (opts.ignore) {\n const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };\n isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);\n }\n\n const matcher = (input, returnObject = false) => {\n const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });\n const result = { glob, state, regex, posix, input, output, match, isMatch };\n\n if (typeof opts.onResult === 'function') {\n opts.onResult(result);\n }\n\n if (isMatch === false) {\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (isIgnored(input)) {\n if (typeof opts.onIgnore === 'function') {\n opts.onIgnore(result);\n }\n result.isMatch = false;\n return returnObject ? result : false;\n }\n\n if (typeof opts.onMatch === 'function') {\n opts.onMatch(result);\n }\n return returnObject ? result : true;\n };\n\n if (returnState) {\n matcher.state = state;\n }\n\n return matcher;\n};\n\n/**\n * Test `input` with the given `regex`. This is used by the main\n * `picomatch()` function to test the input string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.test(input, regex[, options]);\n *\n * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\\/([^/]*?))$/));\n * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp} `regex`\n * @return {Object} Returns an object with matching info.\n * @api public\n */\n\npicomatch.test = (input, regex, options, { glob, posix } = {}) => {\n if (typeof input !== 'string') {\n throw new TypeError('Expected input to be a string');\n }\n\n if (input === '') {\n return { isMatch: false, output: '' };\n }\n\n const opts = options || {};\n const format = opts.format || (posix ? utils.toPosixSlashes : null);\n let match = input === glob;\n let output = (match && format) ? format(input) : input;\n\n if (match === false) {\n output = format ? 
format(input) : input;\n match = output === glob;\n }\n\n if (match === false || opts.capture === true) {\n if (opts.matchBase === true || opts.basename === true) {\n match = picomatch.matchBase(input, regex, options, posix);\n } else {\n match = regex.exec(output);\n }\n }\n\n return { isMatch: Boolean(match), match, output };\n};\n\n/**\n * Match the basename of a filepath.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.matchBase(input, glob[, options]);\n * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true\n * ```\n * @param {String} `input` String to test.\n * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).\n * @return {Boolean}\n * @api public\n */\n\npicomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => {\n const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);\n return regex.test(path.basename(input));\n};\n\n/**\n * Returns true if **any** of the given glob `patterns` match the specified `string`.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.isMatch(string, patterns[, options]);\n *\n * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true\n * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false\n * ```\n * @param {String|Array} str The string to test.\n * @param {String|Array} patterns One or more glob patterns to use for matching.\n * @param {Object} [options] See available [options](#options).\n * @return {Boolean} Returns true if any patterns match `str`\n * @api public\n */\n\npicomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);\n\n/**\n * Parse a glob pattern to create the source string for a regular\n * expression.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const result = picomatch.parse(pattern[, options]);\n * ```\n * @param {String} `pattern`\n * @param {Object} `options`\n * @return {Object} Returns an object with useful properties and output to be used as a regex source string.\n * @api public\n */\n\npicomatch.parse = (pattern, options) => {\n if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));\n return parse(pattern, { ...options, fastpaths: false });\n};\n\n/**\n * Scan a glob pattern to separate the pattern into segments.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.scan(input[, options]);\n *\n * const result = picomatch.scan('!./foo/*.js');\n * console.log(result);\n * { prefix: '!./',\n * input: '!./foo/*.js',\n * start: 3,\n * base: 'foo',\n * glob: '*.js',\n * isBrace: false,\n * isBracket: false,\n * isGlob: true,\n * isExtglob: false,\n * isGlobstar: false,\n * negated: true }\n * ```\n * @param {String} `input` Glob pattern to scan.\n * @param {Object} `options`\n * @return {Object} Returns an object with\n * @api public\n */\n\npicomatch.scan = (input, options) => scan(input, options);\n\n/**\n * Create a regular expression from a parsed glob pattern.\n *\n * ```js\n * const picomatch = require('picomatch');\n * const state = picomatch.parse('*.js');\n * // picomatch.compileRe(state[, options]);\n *\n * console.log(picomatch.compileRe(state));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `state` The object returned from the `.parse` method.\n * @param {Object} `options`\n * @return {RegExp} Returns a regex created from the given pattern.\n * @api public\n */\n\npicomatch.compileRe = (parsed, options, returnOutput = false, returnState = false) => {\n if 
(returnOutput === true) {\n return parsed.output;\n }\n\n const opts = options || {};\n const prepend = opts.contains ? '' : '^';\n const append = opts.contains ? '' : '$';\n\n let source = `${prepend}(?:${parsed.output})${append}`;\n if (parsed && parsed.negated === true) {\n source = `^(?!${source}).*$`;\n }\n\n const regex = picomatch.toRegex(source, options);\n if (returnState === true) {\n regex.state = parsed;\n }\n\n return regex;\n};\n\npicomatch.makeRe = (input, options, returnOutput = false, returnState = false) => {\n if (!input || typeof input !== 'string') {\n throw new TypeError('Expected a non-empty string');\n }\n\n const opts = options || {};\n let parsed = { negated: false, fastpaths: true };\n let prefix = '';\n let output;\n\n if (input.startsWith('./')) {\n input = input.slice(2);\n prefix = parsed.prefix = './';\n }\n\n if (opts.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {\n output = parse.fastpaths(input, options);\n }\n\n if (output === undefined) {\n parsed = parse(input, options);\n parsed.prefix = prefix + (parsed.prefix || '');\n } else {\n parsed.output = output;\n }\n\n return picomatch.compileRe(parsed, options, returnOutput, returnState);\n};\n\n/**\n * Create a regular expression from the given regex source string.\n *\n * ```js\n * const picomatch = require('picomatch');\n * // picomatch.toRegex(source[, options]);\n *\n * const { output } = picomatch.parse('*.js');\n * console.log(picomatch.toRegex(output));\n * //=> /^(?:(?!\\.)(?=.)[^/]*?\\.js)$/\n * ```\n * @param {String} `source` Regular expression source string.\n * @param {Object} `options`\n * @return {RegExp}\n * @api public\n */\n\npicomatch.toRegex = (source, options) => {\n try {\n const opts = options || {};\n return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));\n } catch (err) {\n if (options && options.debug === true) throw err;\n return /$^/;\n }\n};\n\n/**\n * Picomatch constants.\n * @return {Object}\n */\n\npicomatch.constants = constants;\n\n/**\n * Expose \"picomatch\"\n */\n\nmodule.exports = picomatch;\n","'use strict';\n\nconst utils = require('./utils');\nconst {\n CHAR_ASTERISK, /* * */\n CHAR_AT, /* @ */\n CHAR_BACKWARD_SLASH, /* \\ */\n CHAR_COMMA, /* , */\n CHAR_DOT, /* . */\n CHAR_EXCLAMATION_MARK, /* ! */\n CHAR_FORWARD_SLASH, /* / */\n CHAR_LEFT_CURLY_BRACE, /* { */\n CHAR_LEFT_PARENTHESES, /* ( */\n CHAR_LEFT_SQUARE_BRACKET, /* [ */\n CHAR_PLUS, /* + */\n CHAR_QUESTION_MARK, /* ? */\n CHAR_RIGHT_CURLY_BRACE, /* } */\n CHAR_RIGHT_PARENTHESES, /* ) */\n CHAR_RIGHT_SQUARE_BRACKET /* ] */\n} = require('./constants');\n\nconst isPathSeparator = code => {\n return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;\n};\n\nconst depth = token => {\n if (token.isPrefix !== true) {\n token.depth = token.isGlobstar ? 
Infinity : 1;\n }\n};\n\n/**\n * Quickly scans a glob pattern and returns an object with a handful of\n * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),\n * `glob` (the actual pattern), and `negated` (true if the path starts with `!`).\n *\n * ```js\n * const pm = require('picomatch');\n * console.log(pm.scan('foo/bar/*.js'));\n * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }\n * ```\n * @param {String} `str`\n * @param {Object} `options`\n * @return {Object} Returns an object with tokens and regex source string.\n * @api public\n */\n\nconst scan = (input, options) => {\n const opts = options || {};\n\n const length = input.length - 1;\n const scanToEnd = opts.parts === true || opts.scanToEnd === true;\n const slashes = [];\n const tokens = [];\n const parts = [];\n\n let str = input;\n let index = -1;\n let start = 0;\n let lastIndex = 0;\n let isBrace = false;\n let isBracket = false;\n let isGlob = false;\n let isExtglob = false;\n let isGlobstar = false;\n let braceEscaped = false;\n let backslashes = false;\n let negated = false;\n let finished = false;\n let braces = 0;\n let prev;\n let code;\n let token = { value: '', depth: 0, isGlob: false };\n\n const eos = () => index >= length;\n const peek = () => str.charCodeAt(index + 1);\n const advance = () => {\n prev = code;\n return str.charCodeAt(++index);\n };\n\n while (index < length) {\n code = advance();\n let next;\n\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braceEscaped = true;\n }\n continue;\n }\n\n if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (code === CHAR_LEFT_CURLY_BRACE) {\n braces++;\n continue;\n }\n\n if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (braceEscaped !== true && code === CHAR_COMMA) {\n isBrace = token.isBrace = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_RIGHT_CURLY_BRACE) {\n braces--;\n\n if (braces === 0) {\n braceEscaped = false;\n isBrace = token.isBrace = true;\n finished = true;\n break;\n }\n }\n }\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n\n if (code === CHAR_FORWARD_SLASH) {\n slashes.push(index);\n tokens.push(token);\n token = { value: '', depth: 0, isGlob: false };\n\n if (finished === true) continue;\n if (prev === CHAR_DOT && index === (start + 1)) {\n start += 2;\n continue;\n }\n\n lastIndex = index + 1;\n continue;\n }\n\n if (opts.noext !== true) {\n const isExtglobChar = code === CHAR_PLUS\n || code === CHAR_AT\n || code === CHAR_ASTERISK\n || code === CHAR_QUESTION_MARK\n || code === CHAR_EXCLAMATION_MARK;\n\n if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n isExtglob = token.isExtglob = true;\n finished = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n isGlob = token.isGlob = true;\n 
finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n }\n\n if (code === CHAR_ASTERISK) {\n if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_QUESTION_MARK) {\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n\n if (code === CHAR_LEFT_SQUARE_BRACKET) {\n while (eos() !== true && (next = advance())) {\n if (next === CHAR_BACKWARD_SLASH) {\n backslashes = token.backslashes = true;\n advance();\n continue;\n }\n\n if (next === CHAR_RIGHT_SQUARE_BRACKET) {\n isBracket = token.isBracket = true;\n isGlob = token.isGlob = true;\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n break;\n }\n }\n }\n\n if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {\n negated = token.negated = true;\n start++;\n continue;\n }\n\n if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {\n isGlob = token.isGlob = true;\n\n if (scanToEnd === true) {\n while (eos() !== true && (code = advance())) {\n if (code === CHAR_LEFT_PARENTHESES) {\n backslashes = token.backslashes = true;\n code = advance();\n continue;\n }\n\n if (code === CHAR_RIGHT_PARENTHESES) {\n finished = true;\n break;\n }\n }\n continue;\n }\n break;\n }\n\n if (isGlob === true) {\n finished = true;\n\n if (scanToEnd === true) {\n continue;\n }\n\n break;\n }\n }\n\n if (opts.noext === true) {\n isExtglob = false;\n isGlob = false;\n }\n\n let base = str;\n let prefix = '';\n let glob = '';\n\n if (start > 0) {\n prefix = str.slice(0, start);\n str = str.slice(start);\n lastIndex -= start;\n }\n\n if (base && isGlob === true && lastIndex > 0) {\n base = str.slice(0, lastIndex);\n glob = str.slice(lastIndex);\n } else if (isGlob === true) {\n base = '';\n glob = str;\n } else {\n base = str;\n }\n\n if (base && base !== '' && base !== '/' && base !== str) {\n if (isPathSeparator(base.charCodeAt(base.length - 1))) {\n base = base.slice(0, -1);\n }\n }\n\n if (opts.unescape === true) {\n if (glob) glob = utils.removeBackslashes(glob);\n\n if (base && backslashes === true) {\n base = utils.removeBackslashes(base);\n }\n }\n\n const state = {\n prefix,\n input,\n start,\n base,\n glob,\n isBrace,\n isBracket,\n isGlob,\n isExtglob,\n isGlobstar,\n negated\n };\n\n if (opts.tokens === true) {\n state.maxDepth = 0;\n if (!isPathSeparator(code)) {\n tokens.push(token);\n }\n state.tokens = tokens;\n }\n\n if (opts.parts === true || opts.tokens === true) {\n let prevIndex;\n\n for (let idx = 0; idx < slashes.length; idx++) {\n const n = prevIndex ? 
prevIndex + 1 : start;\n const i = slashes[idx];\n const value = input.slice(n, i);\n if (opts.tokens) {\n if (idx === 0 && start !== 0) {\n tokens[idx].isPrefix = true;\n tokens[idx].value = prefix;\n } else {\n tokens[idx].value = value;\n }\n depth(tokens[idx]);\n state.maxDepth += tokens[idx].depth;\n }\n if (idx !== 0 || value !== '') {\n parts.push(value);\n }\n prevIndex = i;\n }\n\n if (prevIndex && prevIndex + 1 < input.length) {\n const value = input.slice(prevIndex + 1);\n parts.push(value);\n\n if (opts.tokens) {\n tokens[tokens.length - 1].value = value;\n depth(tokens[tokens.length - 1]);\n state.maxDepth += tokens[tokens.length - 1].depth;\n }\n }\n\n state.slashes = slashes;\n state.parts = parts;\n }\n\n return state;\n};\n\nmodule.exports = scan;\n","'use strict';\n\nconst path = require('path');\nconst win32 = process.platform === 'win32';\nconst {\n REGEX_BACKSLASH,\n REGEX_REMOVE_BACKSLASH,\n REGEX_SPECIAL_CHARS,\n REGEX_SPECIAL_CHARS_GLOBAL\n} = require('./constants');\n\nexports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);\nexports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);\nexports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);\nexports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\\\$1');\nexports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');\n\nexports.removeBackslashes = str => {\n return str.replace(REGEX_REMOVE_BACKSLASH, match => {\n return match === '\\\\' ? '' : match;\n });\n};\n\nexports.supportsLookbehinds = () => {\n const segs = process.version.slice(1).split('.').map(Number);\n if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) {\n return true;\n }\n return false;\n};\n\nexports.isWindows = options => {\n if (options && typeof options.windows === 'boolean') {\n return options.windows;\n }\n return win32 === true || path.sep === '\\\\';\n};\n\nexports.escapeLast = (input, char, lastIdx) => {\n const idx = input.lastIndexOf(char, lastIdx);\n if (idx === -1) return input;\n if (input[idx - 1] === '\\\\') return exports.escapeLast(input, char, idx - 1);\n return `${input.slice(0, idx)}\\\\${input.slice(idx)}`;\n};\n\nexports.removePrefix = (input, state = {}) => {\n let output = input;\n if (output.startsWith('./')) {\n output = output.slice(2);\n state.prefix = './';\n }\n return output;\n};\n\nexports.wrapOutput = (input, state = {}, options = {}) => {\n const prepend = options.contains ? '' : '^';\n const append = options.contains ? 
'' : '$';\n\n let output = `${prepend}(?:${input})${append}`;\n if (state.negated === true) {\n output = `(?:^(?!${output}).*$)`;\n }\n return output;\n};\n","var once = require('once')\nvar eos = require('end-of-stream')\nvar fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes\n\nvar noop = function () {}\nvar ancient = /^v?\\.0/.test(process.version)\n\nvar isFn = function (fn) {\n return typeof fn === 'function'\n}\n\nvar isFS = function (stream) {\n if (!ancient) return false // newer node version do not need to care about fs is a special way\n if (!fs) return false // browser\n return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)\n}\n\nvar isRequest = function (stream) {\n return stream.setHeader && isFn(stream.abort)\n}\n\nvar destroyer = function (stream, reading, writing, callback) {\n callback = once(callback)\n\n var closed = false\n stream.on('close', function () {\n closed = true\n })\n\n eos(stream, {readable: reading, writable: writing}, function (err) {\n if (err) return callback(err)\n closed = true\n callback()\n })\n\n var destroyed = false\n return function (err) {\n if (closed) return\n if (destroyed) return\n destroyed = true\n\n if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks\n if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want\n\n if (isFn(stream.destroy)) return stream.destroy()\n\n callback(err || new Error('stream was destroyed'))\n }\n}\n\nvar call = function (fn) {\n fn()\n}\n\nvar pipe = function (from, to) {\n return from.pipe(to)\n}\n\nvar pump = function () {\n var streams = Array.prototype.slice.call(arguments)\n var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop\n\n if (Array.isArray(streams[0])) streams = streams[0]\n if (streams.length < 2) throw new Error('pump requires two streams per minimum')\n\n var error\n var destroys = streams.map(function (stream, i) {\n var reading = i < streams.length - 1\n var writing = i > 0\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err\n if (err) destroys.forEach(call)\n if (reading) return\n destroys.forEach(call)\n callback(error)\n })\n })\n\n return streams.reduce(pipe)\n}\n\nmodule.exports = pump\n","'use strict';\n\nclass QuickLRU {\n\tconstructor(options = {}) {\n\t\tif (!(options.maxSize && options.maxSize > 0)) {\n\t\t\tthrow new TypeError('`maxSize` must be a number greater than 0');\n\t\t}\n\n\t\tthis.maxSize = options.maxSize;\n\t\tthis.onEviction = options.onEviction;\n\t\tthis.cache = new Map();\n\t\tthis.oldCache = new Map();\n\t\tthis._size = 0;\n\t}\n\n\t_set(key, value) {\n\t\tthis.cache.set(key, value);\n\t\tthis._size++;\n\n\t\tif (this._size >= this.maxSize) {\n\t\t\tthis._size = 0;\n\n\t\t\tif (typeof this.onEviction === 'function') {\n\t\t\t\tfor (const [key, value] of this.oldCache.entries()) {\n\t\t\t\t\tthis.onEviction(key, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.oldCache = this.cache;\n\t\t\tthis.cache = new Map();\n\t\t}\n\t}\n\n\tget(key) {\n\t\tif (this.cache.has(key)) {\n\t\t\treturn this.cache.get(key);\n\t\t}\n\n\t\tif (this.oldCache.has(key)) {\n\t\t\tconst value = this.oldCache.get(key);\n\t\t\tthis.oldCache.delete(key);\n\t\t\tthis._set(key, value);\n\t\t\treturn value;\n\t\t}\n\t}\n\n\tset(key, value) {\n\t\tif (this.cache.has(key)) {\n\t\t\tthis.cache.set(key, value);\n\t\t} else {\n\t\t\tthis._set(key, 
value);\n\t\t}\n\n\t\treturn this;\n\t}\n\n\thas(key) {\n\t\treturn this.cache.has(key) || this.oldCache.has(key);\n\t}\n\n\tpeek(key) {\n\t\tif (this.cache.has(key)) {\n\t\t\treturn this.cache.get(key);\n\t\t}\n\n\t\tif (this.oldCache.has(key)) {\n\t\t\treturn this.oldCache.get(key);\n\t\t}\n\t}\n\n\tdelete(key) {\n\t\tconst deleted = this.cache.delete(key);\n\t\tif (deleted) {\n\t\t\tthis._size--;\n\t\t}\n\n\t\treturn this.oldCache.delete(key) || deleted;\n\t}\n\n\tclear() {\n\t\tthis.cache.clear();\n\t\tthis.oldCache.clear();\n\t\tthis._size = 0;\n\t}\n\n\t* keys() {\n\t\tfor (const [key] of this) {\n\t\t\tyield key;\n\t\t}\n\t}\n\n\t* values() {\n\t\tfor (const [, value] of this) {\n\t\t\tyield value;\n\t\t}\n\t}\n\n\t* [Symbol.iterator]() {\n\t\tfor (const item of this.cache) {\n\t\t\tyield item;\n\t\t}\n\n\t\tfor (const item of this.oldCache) {\n\t\t\tconst [key] = item;\n\t\t\tif (!this.cache.has(key)) {\n\t\t\t\tyield item;\n\t\t\t}\n\t\t}\n\t}\n\n\tget size() {\n\t\tlet oldCacheSize = 0;\n\t\tfor (const key of this.oldCache.keys()) {\n\t\t\tif (!this.cache.has(key)) {\n\t\t\t\toldCacheSize++;\n\t\t\t}\n\t\t}\n\n\t\treturn Math.min(this._size + oldCacheSize, this.maxSize);\n\t}\n}\n\nmodule.exports = QuickLRU;\n","'use strict';\nconst tls = require('tls');\n\nmodule.exports = (options = {}) => new Promise((resolve, reject) => {\n\tconst socket = tls.connect(options, () => {\n\t\tif (options.resolveSocket) {\n\t\t\tsocket.off('error', reject);\n\t\t\tresolve({alpnProtocol: socket.alpnProtocol, socket});\n\t\t} else {\n\t\t\tsocket.destroy();\n\t\t\tresolve({alpnProtocol: socket.alpnProtocol});\n\t\t}\n\t});\n\n\tsocket.on('error', reject);\n});\n","'use strict';\n\nconst Readable = require('stream').Readable;\nconst lowercaseKeys = require('lowercase-keys');\n\nclass Response extends Readable {\n\tconstructor(statusCode, headers, body, url) {\n\t\tif (typeof statusCode !== 'number') {\n\t\t\tthrow new TypeError('Argument `statusCode` should be a number');\n\t\t}\n\t\tif (typeof headers !== 'object') {\n\t\t\tthrow new TypeError('Argument `headers` should be an object');\n\t\t}\n\t\tif (!(body instanceof Buffer)) {\n\t\t\tthrow new TypeError('Argument `body` should be a buffer');\n\t\t}\n\t\tif (typeof url !== 'string') {\n\t\t\tthrow new TypeError('Argument `url` should be a string');\n\t\t}\n\n\t\tsuper();\n\t\tthis.statusCode = statusCode;\n\t\tthis.headers = lowercaseKeys(headers);\n\t\tthis.body = body;\n\t\tthis.url = url;\n\t}\n\n\t_read() {\n\t\tthis.push(this.body);\n\t\tthis.push(null);\n\t}\n}\n\nmodule.exports = Response;\n","'use strict'\n\nfunction reusify (Constructor) {\n var head = new Constructor()\n var tail = head\n\n function get () {\n var current = head\n\n if (current.next) {\n head = current.next\n } else {\n head = new Constructor()\n tail = head\n }\n\n current.next = null\n\n return current\n }\n\n function release (obj) {\n tail.next = obj\n tail = obj\n }\n\n return {\n get: get,\n release: release\n }\n}\n\nmodule.exports = reusify\n","/*! run-parallel. MIT License. 
Feross Aboukhadijeh */\nmodule.exports = runParallel\n\nfunction runParallel (tasks, cb) {\n var results, pending, keys\n var isSync = true\n\n if (Array.isArray(tasks)) {\n results = []\n pending = tasks.length\n } else {\n keys = Object.keys(tasks)\n results = {}\n pending = keys.length\n }\n\n function done (err) {\n function end () {\n if (cb) cb(err, results)\n cb = null\n }\n if (isSync) process.nextTick(end)\n else end()\n }\n\n function each (i, err, result) {\n results[i] = result\n if (--pending === 0 || err) {\n done(err)\n }\n }\n\n if (!pending) {\n // empty\n done(null)\n } else if (keys) {\n // object\n keys.forEach(function (key) {\n tasks[key](function (err, result) { each(key, err, result) })\n })\n } else {\n // array\n tasks.forEach(function (task, i) {\n task(function (err, result) { each(i, err, result) })\n })\n }\n\n isSync = false\n}\n",";(function (sax) { // wrapper for non-node envs\n sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }\n sax.SAXParser = SAXParser\n sax.SAXStream = SAXStream\n sax.createStream = createStream\n\n // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.\n // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),\n // since that's the earliest that a buffer overrun could occur. This way, checks are\n // as rare as required, but as often as necessary to ensure never crossing this bound.\n // Furthermore, buffers are only tested at most once per write(), so passing a very\n // large string into write() might have undesirable effects, but this is manageable by\n // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme\n // edge case, result in creating at most one complete copy of the string passed in.\n // Set to Infinity to have unlimited buffers.\n sax.MAX_BUFFER_LENGTH = 64 * 1024\n\n var buffers = [\n 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',\n 'procInstName', 'procInstBody', 'entity', 'attribName',\n 'attribValue', 'cdata', 'script'\n ]\n\n sax.EVENTS = [\n 'text',\n 'processinginstruction',\n 'sgmldeclaration',\n 'doctype',\n 'comment',\n 'opentagstart',\n 'attribute',\n 'opentag',\n 'closetag',\n 'opencdata',\n 'cdata',\n 'closecdata',\n 'error',\n 'end',\n 'ready',\n 'script',\n 'opennamespace',\n 'closenamespace'\n ]\n\n function SAXParser (strict, opt) {\n if (!(this instanceof SAXParser)) {\n return new SAXParser(strict, opt)\n }\n\n var parser = this\n clearBuffers(parser)\n parser.q = parser.c = ''\n parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH\n parser.opt = opt || {}\n parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags\n parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'\n parser.tags = []\n parser.closed = parser.closedRoot = parser.sawRoot = false\n parser.tag = parser.error = null\n parser.strict = !!strict\n parser.noscript = !!(strict || parser.opt.noscript)\n parser.state = S.BEGIN\n parser.strictEntities = parser.opt.strictEntities\n parser.ENTITIES = parser.strictEntities ? 
Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)\n parser.attribList = []\n\n // namespaces form a prototype chain.\n // it always points at the current tag,\n // which protos to its parent tag.\n if (parser.opt.xmlns) {\n parser.ns = Object.create(rootNS)\n }\n\n // mostly just for error reporting\n parser.trackPosition = parser.opt.position !== false\n if (parser.trackPosition) {\n parser.position = parser.line = parser.column = 0\n }\n emit(parser, 'onready')\n }\n\n if (!Object.create) {\n Object.create = function (o) {\n function F () {}\n F.prototype = o\n var newf = new F()\n return newf\n }\n }\n\n if (!Object.keys) {\n Object.keys = function (o) {\n var a = []\n for (var i in o) if (o.hasOwnProperty(i)) a.push(i)\n return a\n }\n }\n\n function checkBufferLength (parser) {\n var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)\n var maxActual = 0\n for (var i = 0, l = buffers.length; i < l; i++) {\n var len = parser[buffers[i]].length\n if (len > maxAllowed) {\n // Text/cdata nodes can get big, and since they're buffered,\n // we can get here under normal conditions.\n // Avoid issues by emitting the text node now,\n // so at least it won't get any bigger.\n switch (buffers[i]) {\n case 'textNode':\n closeText(parser)\n break\n\n case 'cdata':\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n break\n\n case 'script':\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n break\n\n default:\n error(parser, 'Max buffer length exceeded: ' + buffers[i])\n }\n }\n maxActual = Math.max(maxActual, len)\n }\n // schedule the next check for the earliest possible buffer overrun.\n var m = sax.MAX_BUFFER_LENGTH - maxActual\n parser.bufferCheckPosition = m + parser.position\n }\n\n function clearBuffers (parser) {\n for (var i = 0, l = buffers.length; i < l; i++) {\n parser[buffers[i]] = ''\n }\n }\n\n function flushBuffers (parser) {\n closeText(parser)\n if (parser.cdata !== '') {\n emitNode(parser, 'oncdata', parser.cdata)\n parser.cdata = ''\n }\n if (parser.script !== '') {\n emitNode(parser, 'onscript', parser.script)\n parser.script = ''\n }\n }\n\n SAXParser.prototype = {\n end: function () { end(this) },\n write: write,\n resume: function () { this.error = null; return this },\n close: function () { return this.write(null) },\n flush: function () { flushBuffers(this) }\n }\n\n var Stream\n try {\n Stream = require('stream').Stream\n } catch (ex) {\n Stream = function () {}\n }\n\n var streamWraps = sax.EVENTS.filter(function (ev) {\n return ev !== 'error' && ev !== 'end'\n })\n\n function createStream (strict, opt) {\n return new SAXStream(strict, opt)\n }\n\n function SAXStream (strict, opt) {\n if (!(this instanceof SAXStream)) {\n return new SAXStream(strict, opt)\n }\n\n Stream.apply(this)\n\n this._parser = new SAXParser(strict, opt)\n this.writable = true\n this.readable = true\n\n var me = this\n\n this._parser.onend = function () {\n me.emit('end')\n }\n\n this._parser.onerror = function (er) {\n me.emit('error', er)\n\n // if didn't throw, then means error was handled.\n // go ahead and clear error, so we can write again.\n me._parser.error = null\n }\n\n this._decoder = null\n\n streamWraps.forEach(function (ev) {\n Object.defineProperty(me, 'on' + ev, {\n get: function () {\n return me._parser['on' + ev]\n },\n set: function (h) {\n if (!h) {\n me.removeAllListeners(ev)\n me._parser['on' + ev] = h\n return h\n }\n me.on(ev, h)\n },\n enumerable: true,\n configurable: false\n })\n })\n }\n\n SAXStream.prototype = 
Object.create(Stream.prototype, {\n constructor: {\n value: SAXStream\n }\n })\n\n SAXStream.prototype.write = function (data) {\n if (typeof Buffer === 'function' &&\n typeof Buffer.isBuffer === 'function' &&\n Buffer.isBuffer(data)) {\n if (!this._decoder) {\n var SD = require('string_decoder').StringDecoder\n this._decoder = new SD('utf8')\n }\n data = this._decoder.write(data)\n }\n\n this._parser.write(data.toString())\n this.emit('data', data)\n return true\n }\n\n SAXStream.prototype.end = function (chunk) {\n if (chunk && chunk.length) {\n this.write(chunk)\n }\n this._parser.end()\n return true\n }\n\n SAXStream.prototype.on = function (ev, handler) {\n var me = this\n if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {\n me._parser['on' + ev] = function () {\n var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)\n args.splice(0, 0, ev)\n me.emit.apply(me, args)\n }\n }\n\n return Stream.prototype.on.call(me, ev, handler)\n }\n\n // this really needs to be replaced with character classes.\n // XML allows all manner of ridiculous numbers and digits.\n var CDATA = '[CDATA['\n var DOCTYPE = 'DOCTYPE'\n var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'\n var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'\n var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }\n\n // http://www.w3.org/TR/REC-xml/#NT-NameStartChar\n // This implementation works on strings, a single character at a time\n // as such, it cannot ever support astral-plane characters (10000-EFFFF)\n // without a significant breaking change to either this parser, or the\n // JavaScript language. Implementation of an emoji-capable xml parser\n // is left as an exercise for the reader.\n var nameStart = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n\n var nameBody = /[:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n var entityStart = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD]/\n var entityBody = /[#:_A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\u00B7\\u0300-\\u036F\\u203F-\\u2040.\\d-]/\n\n function isWhitespace (c) {\n return c === ' ' || c === '\\n' || c === '\\r' || c === '\\t'\n }\n\n function isQuote (c) {\n return c === '\"' || c === '\\''\n }\n\n function isAttribEnd (c) {\n return c === '>' || isWhitespace(c)\n }\n\n function isMatch (regex, c) {\n return regex.test(c)\n }\n\n function notMatch (regex, c) {\n return !isMatch(regex, c)\n }\n\n var S = 0\n sax.STATE = {\n BEGIN: S++, // leading byte order mark or whitespace\n BEGIN_WHITESPACE: S++, // leading whitespace\n TEXT: S++, // general stuff\n TEXT_ENTITY: S++, // & and such.\n OPEN_WAKA: S++, // <\n SGML_DECL: S++, // \n SCRIPT: S++, //