diff --git a/.github/workflows/check-dist.yml b/.github/workflows/check-dist.yml
index e571511..c994dfa 100644
--- a/.github/workflows/check-dist.yml
+++ b/.github/workflows/check-dist.yml
@@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- name: Set Node.js
- uses: actions/setup-node@v4
+ uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
@@ -46,7 +46,7 @@ jobs:
id: diff
# If index.js was different than expected, upload the expected version as an artifact
- - uses: actions/upload-artifact@v4
+ - uses: actions/upload-artifact@v5
if: ${{ failure() && steps.diff.conclusion == 'failure' }}
with:
name: dist
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d5f8615..aeec96d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -13,8 +13,8 @@ jobs:
name: Build & Test
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-node@v4
+ - uses: actions/checkout@v6
+ - uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'
- run: npm ci
@@ -25,7 +25,7 @@ jobs:
- name: Upload test results
if: ${{ !cancelled() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v5
with:
name: test-results
path: __tests__/__results__/*.xml
diff --git a/.github/workflows/manual-run.yml b/.github/workflows/manual-run.yml
index 169c810..c1875a9 100644
--- a/.github/workflows/manual-run.yml
+++ b/.github/workflows/manual-run.yml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- run: npm ci
- run: npm run build
- run: npm test
diff --git a/.github/workflows/test-report.yml b/.github/workflows/test-report.yml
index e3f9555..11b266a 100644
--- a/.github/workflows/test-report.yml
+++ b/.github/workflows/test-report.yml
@@ -11,7 +11,7 @@ jobs:
name: Workflow test
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v6
- uses: ./
with:
artifact: test-results
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8489364..b0badd5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog
+## 2.3.0
+* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
+* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
+* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
+* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
+* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
+
+## 2.2.0
+* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
+* Fix badge encoding for values including underscores and hyphens https://github.com/dorny/test-reporter/pull/672
+* Fix missing `report-title` attribute in action definition https://github.com/dorny/test-reporter/pull/637
+* Refactor variable names to fix shadowing issues https://github.com/dorny/test-reporter/pull/630
+
## 2.1.1
* Fix error when a TestMethod element does not have a className attribute in a trx file https://github.com/dorny/test-reporter/pull/623
* Add stack trace from trx to summary https://github.com/dorny/test-reporter/pull/615
diff --git a/README.md b/README.md
index c307ff3..d900926 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,8 @@ This [Github Action](https://github.com/features/actions) displays test results
- Go / [go test](https://pkg.go.dev/testing)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
+- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
+- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit
For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +147,9 @@ jobs:
# java-junit
# jest-junit
# mocha-json
+ # python-xunit
# rspec-json
+ # swift-xunit
reporter: ''
# Allows you to generate only the summary.
@@ -253,6 +257,20 @@ Supported testing frameworks:
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
+
+ dotnet-nunit
+
+Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
+Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
+
+`dotnet test --logger "nunit;LogFileName=test-results.xml"`
+
+Supported testing frameworks:
+- [NUnit](https://nunit.org/)
+
+For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
+
+
flutter-json
@@ -349,6 +367,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
+
+ python-xunit (Experimental)
+
+Support for Python test results in xUnit format is experimental — it should work, but it has not been extensively tested.
+
+For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
+
+```shell
+pytest --junit-xml=test-report.xml
+```
+
+For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
+
+
+
+ rspec-json
+
+[RSpec](https://rspec.info/) testing framework support requires the use of the JSON formatter.
+You can configure RSpec to output JSON format by using the `--format json` option and redirecting to a file:
+
+```shell
+rspec --format json --out rspec-results.json
+```
+
+Or configure it in `.rspec` file:
+```
+--format json
+--out rspec-results.json
+```
+
+For more information see:
+- [RSpec documentation](https://rspec.info/)
+- [RSpec Formatters](https://rspec.info/features/3-13/rspec-core/formatters/)
+
+
swift-xunit (Experimental)
diff --git a/__tests__/__outputs__/python-xunit-pytest.md b/__tests__/__outputs__/python-xunit-pytest.md
new file mode 100644
index 0000000..7b13e28
--- /dev/null
+++ b/__tests__/__outputs__/python-xunit-pytest.md
@@ -0,0 +1,26 @@
+
+|Report|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
+## ❌ fixtures/python-xunit-pytest.xml
+**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
+|Test suite|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
+### ❌ pytest
+```
+tests.test_lib
+ ✅ test_always_pass
+ ✅ test_with_subtests
+ ✅ test_parameterized[param1]
+ ✅ test_parameterized[param2]
+ ⚪ test_always_skip
+ ❌ test_always_fail
+ assert False
+ ⚪ test_expected_failure
+ ❌ test_error
+ Exception: error
+ ✅ test_with_record_property
+custom_classname
+ ✅ test_with_record_xml_attribute
+```
\ No newline at end of file
diff --git a/__tests__/__outputs__/python-xunit-unittest.md b/__tests__/__outputs__/python-xunit-unittest.md
new file mode 100644
index 0000000..230d186
--- /dev/null
+++ b/__tests__/__outputs__/python-xunit-unittest.md
@@ -0,0 +1,23 @@
+
+|Report|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
+## ❌ fixtures/python-xunit-unittest.xml
+**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
+|Test suite|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
+### ❌ TestAcme-20251114214921
+```
+TestAcme
+ ✅ test_always_pass
+ ✅ test_parameterized_0_param1
+ ✅ test_parameterized_1_param2
+ ✅ test_with_subtests
+ ❌ test_always_fail
+ AssertionError: failed
+ ❌ test_error
+ Exception: error
+ ⚪ test_always_skip
+ ⚪ test_expected_failure
+```
\ No newline at end of file
diff --git a/__tests__/__snapshots__/dart-json.test.ts.snap b/__tests__/__snapshots__/dart-json.test.ts.snap
index a499822..88a7349 100644
--- a/__tests__/__snapshots__/dart-json.test.ts.snap
+++ b/__tests__/__snapshots__/dart-json.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`dart-json tests matches report snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/dotnet-nunit.test.ts.snap b/__tests__/__snapshots__/dotnet-nunit.test.ts.snap
index 60d55f2..529f702 100644
--- a/__tests__/__snapshots__/dotnet-nunit.test.ts.snap
+++ b/__tests__/__snapshots__/dotnet-nunit.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`dotnet-nunit tests report from ./reports/dotnet test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/dotnet-trx.test.ts.snap b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
index f432162..b9d272d 100644
--- a/__tests__/__snapshots__/dotnet-trx.test.ts.snap
+++ b/__tests__/__snapshots__/dotnet-trx.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`dotnet-trx tests matches dotnet-trx report snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/golang-json.test.ts.snap b/__tests__/__snapshots__/golang-json.test.ts.snap
index 75c6de1..bd28d4a 100644
--- a/__tests__/__snapshots__/golang-json.test.ts.snap
+++ b/__tests__/__snapshots__/golang-json.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`golang-json tests report from ./reports/dotnet test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/java-junit.test.ts.snap b/__tests__/__snapshots__/java-junit.test.ts.snap
index 341b092..38daca9 100644
--- a/__tests__/__snapshots__/java-junit.test.ts.snap
+++ b/__tests__/__snapshots__/java-junit.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/jest-junit.test.ts.snap b/__tests__/__snapshots__/jest-junit.test.ts.snap
index eb20dfe..eca0092 100644
--- a/__tests__/__snapshots__/jest-junit.test.ts.snap
+++ b/__tests__/__snapshots__/jest-junit.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`jest-junit tests parsing ESLint report without timing information works - PR #134 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/mocha-json.test.ts.snap b/__tests__/__snapshots__/mocha-json.test.ts.snap
index 7038239..4a1448c 100644
--- a/__tests__/__snapshots__/mocha-json.test.ts.snap
+++ b/__tests__/__snapshots__/mocha-json.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/python-xunit.test.ts.snap b/__tests__/__snapshots__/python-xunit.test.ts.snap
new file mode 100644
index 0000000..f325c84
--- /dev/null
+++ b/__tests__/__snapshots__/python-xunit.test.ts.snap
@@ -0,0 +1,192 @@
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+
+exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
+TestRunResult {
+ "path": "fixtures/python-xunit-pytest.xml",
+ "suites": [
+ TestSuiteResult {
+ "groups": [
+ TestGroupResult {
+ "name": "tests.test_lib",
+ "tests": [
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_pass",
+ "result": "success",
+ "time": 2,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_with_subtests",
+ "result": "success",
+ "time": 5,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_parameterized[param1]",
+ "result": "success",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_parameterized[param2]",
+ "result": "success",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_skip",
+ "result": "skipped",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": {
+ "details": "def test_always_fail():
+ > assert False
+ E assert False
+
+ tests/test_lib.py:25: AssertionError
+ ",
+ "line": undefined,
+ "message": "assert False",
+ "path": undefined,
+ },
+ "name": "test_always_fail",
+ "result": "failed",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_expected_failure",
+ "result": "skipped",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": {
+ "details": "def test_error():
+ > raise Exception("error")
+ E Exception: error
+
+ tests/test_lib.py:32: Exception
+ ",
+ "line": undefined,
+ "message": "Exception: error",
+ "path": undefined,
+ },
+ "name": "test_error",
+ "result": "failed",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_with_record_property",
+ "result": "success",
+ "time": 0,
+ },
+ ],
+ },
+ TestGroupResult {
+ "name": "custom_classname",
+ "tests": [
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_with_record_xml_attribute",
+ "result": "success",
+ "time": 0,
+ },
+ ],
+ },
+ ],
+ "name": "pytest",
+ "totalTime": 19,
+ },
+ ],
+ "totalTime": undefined,
+}
+`;
+
+exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
+TestRunResult {
+ "path": "fixtures/python-xunit-unittest.xml",
+ "suites": [
+ TestSuiteResult {
+ "groups": [
+ TestGroupResult {
+ "name": "TestAcme",
+ "tests": [
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_pass",
+ "result": "success",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_parameterized_0_param1",
+ "result": "success",
+ "time": 1,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_parameterized_1_param2",
+ "result": "success",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_with_subtests",
+ "result": "success",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": {
+ "details": "Traceback (most recent call last):
+ File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
+ self.fail("failed")
+AssertionError: failed
+",
+ "line": undefined,
+ "message": "AssertionError: failed",
+ "path": undefined,
+ },
+ "name": "test_always_fail",
+ "result": "failed",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": {
+ "details": "Traceback (most recent call last):
+ File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
+ raise Exception("error")
+Exception: error
+",
+ "line": undefined,
+ "message": "Exception: error",
+ "path": undefined,
+ },
+ "name": "test_error",
+ "result": "failed",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_always_skip",
+ "result": "skipped",
+ "time": 0,
+ },
+ TestCaseResult {
+ "error": undefined,
+ "name": "test_expected_failure",
+ "result": "skipped",
+ "time": 0,
+ },
+ ],
+ },
+ ],
+ "name": "TestAcme-20251114214921",
+ "totalTime": 1,
+ },
+ ],
+ "totalTime": 1,
+}
+`;
diff --git a/__tests__/__snapshots__/rspec-json.test.ts.snap b/__tests__/__snapshots__/rspec-json.test.ts.snap
index cc14bfb..51c1943 100644
--- a/__tests__/__snapshots__/rspec-json.test.ts.snap
+++ b/__tests__/__snapshots__/rspec-json.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/__snapshots__/swift-xunit.test.ts.snap b/__tests__/__snapshots__/swift-xunit.test.ts.snap
index ae34deb..bddc6ea 100644
--- a/__tests__/__snapshots__/swift-xunit.test.ts.snap
+++ b/__tests__/__snapshots__/swift-xunit.test.ts.snap
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
TestRunResult {
diff --git a/__tests__/fixtures/python-xunit-pytest.xml b/__tests__/fixtures/python-xunit-pytest.xml
new file mode 100644
index 0000000..fcb044a
--- /dev/null
+++ b/__tests__/fixtures/python-xunit-pytest.xml
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+ /Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
+
+
+
+ def test_always_fail():
+ > assert False
+ E assert False
+
+ tests/test_lib.py:25: AssertionError
+
+
+
+
+
+
+ def test_error():
+ > raise Exception("error")
+ E Exception: error
+
+ tests/test_lib.py:32: Exception
+
+
+
+
+
+
+
+
+
+
diff --git a/__tests__/fixtures/python-xunit-unittest.xml b/__tests__/fixtures/python-xunit-unittest.xml
new file mode 100644
index 0000000..ecc67d4
--- /dev/null
+++ b/__tests__/fixtures/python-xunit-unittest.xml
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/__tests__/jest-junit.test.ts b/__tests__/jest-junit.test.ts
index f4b8335..912ebde 100644
--- a/__tests__/jest-junit.test.ts
+++ b/__tests__/jest-junit.test.ts
@@ -207,4 +207,143 @@ describe('jest-junit tests', () => {
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n/)
})
+
+ it('report can be collapsed when configured', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ collapsed: 'always'
+ })
+ // Report should include collapsible details
+ expect(report).toContain('Expand for details
')
+ expect(report).toContain(' ')
+ })
+
+ it('report is not collapsed when configured to never', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ collapsed: 'never'
+ })
+ // Report should not include collapsible details
+ expect(report).not.toContain('Expand for details
')
+ expect(report).not.toContain(' ')
+ })
+
+ it('report auto-collapses when all tests pass', async () => {
+ // Test with a fixture that has all passing tests (no failures)
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+
+ // Verify this fixture has no failures
+ expect(result.failed).toBe(0)
+
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ collapsed: 'auto'
+ })
+
+ // Should collapse when all tests pass
+ expect(report).toContain('Expand for details
')
+ expect(report).toContain(' ')
+ })
+
+ it('report does not auto-collapse when tests fail', async () => {
+ // Test with a fixture that has failing tests
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+
+ // Verify this fixture has failures
+ expect(result.failed).toBeGreaterThan(0)
+
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ collapsed: 'auto'
+ })
+
+ // Should not collapse when there are failures
+ expect(report).not.toContain('Expand for details
')
+ expect(report).not.toContain(' ')
+ })
+
+ it('report includes the short summary', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ const shortSummary = '1 passed, 4 failed and 1 skipped'
+ const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
+ // Report should have the title as the first line
+ expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
+ })
+
+ it('report includes a custom report title and short summary', async () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+ const opts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+ }
+
+ const parser = new JestJunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ const shortSummary = '1 passed, 4 failed and 1 skipped'
+ const report = getReport(
+ [result],
+ {
+ ...DEFAULT_OPTIONS,
+ reportTitle: 'My Custom Title'
+ },
+ shortSummary
+ )
+ // Report should have the title as the first line
+ expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
+ })
})
diff --git a/__tests__/python-xunit.test.ts b/__tests__/python-xunit.test.ts
new file mode 100644
index 0000000..c1550a4
--- /dev/null
+++ b/__tests__/python-xunit.test.ts
@@ -0,0 +1,93 @@
+import * as fs from 'fs'
+import * as path from 'path'
+
+import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
+import {ParseOptions} from '../src/test-parser'
+import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
+import {normalizeFilePath} from '../src/utils/path-utils'
+
+const defaultOpts: ParseOptions = {
+ parseErrors: true,
+ trackedFiles: []
+}
+
+describe('python-xunit unittest report', () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+ const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')
+
+ it('report from python test results matches snapshot', async () => {
+ const trackedFiles = ['tests/test_lib.py']
+ const opts: ParseOptions = {
+ ...defaultOpts,
+ trackedFiles
+ }
+
+ const parser = new PythonXunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ expect(result).toMatchSnapshot()
+
+ const report = getReport([result])
+ fs.mkdirSync(path.dirname(outputPath), {recursive: true})
+ fs.writeFileSync(outputPath, report)
+ })
+
+ it('report does not include a title by default', async () => {
+ const parser = new PythonXunitParser(defaultOpts)
+ const result = await parser.parse(filePath, fileContent)
+ const report = getReport([result])
+ // Report should have the badge as the first line
+ expect(report).toMatch(/^!\[Tests failed]/)
+ })
+
+ it.each([
+ ['empty string', ''],
+ ['space', ' '],
+ ['tab', '\t'],
+ ['newline', '\n']
+ ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
+ const parser = new PythonXunitParser(defaultOpts)
+ const result = await parser.parse(filePath, fileContent)
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ reportTitle
+ })
+ // Report should have the badge as the first line
+ expect(report).toMatch(/^!\[Tests failed]/)
+ })
+
+ it('report includes a custom report title', async () => {
+ const parser = new PythonXunitParser(defaultOpts)
+ const result = await parser.parse(filePath, fileContent)
+ const report = getReport([result], {
+ ...DEFAULT_OPTIONS,
+ reportTitle: 'My Custom Title'
+ })
+ // Report should have the title as the first line
+ expect(report).toMatch(/^# My Custom Title\n/)
+ })
+})
+
+describe('python-xunit pytest report', () => {
+ const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
+ const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+ const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+ const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
+
+ it('report from python test results matches snapshot', async () => {
+ const trackedFiles = ['tests/test_lib.py']
+ const opts: ParseOptions = {
+ ...defaultOpts,
+ trackedFiles
+ }
+
+ const parser = new PythonXunitParser(opts)
+ const result = await parser.parse(filePath, fileContent)
+ expect(result).toMatchSnapshot()
+
+ const report = getReport([result])
+ fs.mkdirSync(path.dirname(outputPath), {recursive: true})
+ fs.writeFileSync(outputPath, report)
+ })
+})
diff --git a/__tests__/report/get-report.test.ts b/__tests__/report/get-report.test.ts
new file mode 100644
index 0000000..670b0ad
--- /dev/null
+++ b/__tests__/report/get-report.test.ts
@@ -0,0 +1,120 @@
+import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'
+
+describe('getBadge', () => {
+ describe('URI encoding with special characters', () => {
+ it('generates correct URI with simple badge title', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'tests'
+ }
+ const badge = getBadge(5, 0, 1, options)
+ expect(badge).toBe('')
+ })
+
+ it('handles badge title with single hyphen', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'unit-tests'
+ }
+ const badge = getBadge(3, 0, 0, options)
+ // The hyphen in the badge title should be encoded as --
+ expect(badge).toBe('')
+ })
+
+ it('handles badge title with multiple hyphens', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'integration-api-tests'
+ }
+ const badge = getBadge(10, 0, 0, options)
+ // All hyphens in the title should be encoded as --
+ expect(badge).toBe('')
+ })
+
+ it('handles badge title with multiple underscores', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'my_integration_test'
+ }
+ const badge = getBadge(10, 0, 0, options)
+ // All underscores in the title should be encoded as __
+ expect(badge).toBe('')
+ })
+
+ it('handles badge title with version format containing hyphen', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'MariaDb 12.0-ubi database tests'
+ }
+ const badge = getBadge(1, 0, 0, options)
+ // The hyphen in "12.0-ubi" should be encoded as --
+ expect(badge).toBe('')
+ })
+
+ it('handles badge title with dots and hyphens', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'v1.2.3-beta-test'
+ }
+ const badge = getBadge(4, 1, 0, options)
+ expect(badge).toBe('')
+ })
+
+ it('preserves structural hyphens between label and message', () => {
+ const options: ReportOptions = {
+ ...DEFAULT_OPTIONS,
+ badgeTitle: 'test-suite'
+ }
+ const badge = getBadge(2, 3, 1, options)
+ // The URI should have literal hyphens separating title-message-color
+ expect(badge).toBe('')
+ })
+ })
+
+ describe('generates test outcome as color name for imgshields', () => {
+ it('uses success color when all tests pass', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(5, 0, 0, options)
+ expect(badge).toContain('-success)')
+ })
+
+ it('uses critical color when tests fail', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(5, 2, 0, options)
+ expect(badge).toContain('-critical)')
+ })
+
+ it('uses yellow color when no tests found', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(0, 0, 0, options)
+ expect(badge).toContain('-yellow)')
+ })
+ })
+
+ describe('badge message composition', () => {
+ it('includes only passed count when no failures or skips', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(5, 0, 0, options)
+ expect(badge).toBe('')
+ })
+
+ it('includes passed and failed counts', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(5, 2, 0, options)
+ expect(badge).toBe('')
+ })
+
+ it('includes passed, failed and skipped counts', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(5, 2, 1, options)
+ expect(badge).toBe('')
+ })
+
+ it('uses "none" message when no tests', () => {
+ const options: ReportOptions = {...DEFAULT_OPTIONS}
+ const badge = getBadge(0, 0, 0, options)
+ expect(badge).toBe('')
+ })
+ })
+})
+
diff --git a/__tests__/utils/parse-utils.test.ts b/__tests__/utils/parse-utils.test.ts
index 83689ef..0f02867 100644
--- a/__tests__/utils/parse-utils.test.ts
+++ b/__tests__/utils/parse-utils.test.ts
@@ -32,6 +32,6 @@ describe('parseNetDuration', () => {
})
it('throws when string has invalid format', () => {
- expect(() => parseNetDuration('12:34:56 not a duration')).toThrowError(/^Invalid format/)
+ expect(() => parseNetDuration('12:34:56 not a duration')).toThrow(/^Invalid format/)
})
})
diff --git a/action.yml b/action.yml
index ec4772f..530435c 100644
--- a/action.yml
+++ b/action.yml
@@ -1,6 +1,5 @@
name: Test Reporter
-description: |
- Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
+description: Displays test results from popular testing frameworks directly in GitHub
author: Michal Dorner
inputs:
artifact:
@@ -29,9 +28,11 @@ inputs:
- dotnet-nunit
- dotnet-trx
- flutter-json
+ - golang-json
- java-junit
- jest-junit
- mocha-json
+ - python-xunit
- rspec-json
- swift-xunit
required: true
@@ -68,6 +69,10 @@ inputs:
working-directory:
description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
required: false
+ report-title:
+ description: Title for the test report summary
+ required: false
+ default: ''
only-summary:
description: |
Allows you to generate only the summary.
@@ -85,6 +90,14 @@ inputs:
description: Customize badge title
required: false
default: 'tests'
+ collapsed:
+ description: |
+ Controls whether test report details are collapsed or expanded. Supported options:
+ - auto: Collapse only if all tests pass (default behavior)
+ - always: Always collapse the report details
+ - never: Always expand the report details
+ required: false
+ default: 'auto'
token:
description: GitHub Access Token
required: false
diff --git a/dist/index.js b/dist/index.js
index a5ea165..42cb52e 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -277,6 +277,7 @@ const golang_json_parser_1 = __nccwpck_require__(5162);
const java_junit_parser_1 = __nccwpck_require__(8342);
const jest_junit_parser_1 = __nccwpck_require__(1042);
const mocha_json_parser_1 = __nccwpck_require__(5402);
+const python_xunit_parser_1 = __nccwpck_require__(6578);
const rspec_json_parser_1 = __nccwpck_require__(9768);
const swift_xunit_parser_1 = __nccwpck_require__(7330);
const path_utils_1 = __nccwpck_require__(9132);
@@ -309,6 +310,7 @@ class TestReporter {
useActionsSummary = core.getInput('use-actions-summary', { required: false }) === 'true';
badgeTitle = core.getInput('badge-title', { required: false });
reportTitle = core.getInput('report-title', { required: false });
+ collapsed = core.getInput('collapsed', { required: false });
token = core.getInput('token', { required: true });
octokit;
context = (0, github_utils_1.getCheckRunContext)();
@@ -322,6 +324,10 @@ class TestReporter {
core.setFailed(`Input parameter 'list-tests' has invalid value`);
return;
}
+ if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
+ core.setFailed(`Input parameter 'collapsed' has invalid value`);
+ return;
+ }
if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {
core.setFailed(`Input parameter 'max-annotations' has invalid value`);
return;
@@ -401,7 +407,7 @@ class TestReporter {
throw error;
}
}
- const { listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle } = this;
+ const { listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed } = this;
const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
const failed = results.reduce((sum, tr) => sum + tr.failed, 0);
const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
@@ -415,11 +421,11 @@ class TestReporter {
onlySummary,
useActionsSummary,
badgeTitle,
- reportTitle
- });
+ reportTitle,
+ collapsed
+ }, shortSummary);
core.info('Summary content:');
core.info(summary);
- core.summary.addRaw(`# ${shortSummary}`);
await core.summary.addRaw(summary).write();
}
else {
@@ -443,7 +449,8 @@ class TestReporter {
onlySummary,
useActionsSummary,
badgeTitle,
- reportTitle
+ reportTitle,
+ collapsed
});
core.info('Creating annotations');
const annotations = (0, get_annotations_1.getAnnotations)(results, this.maxAnnotations);
@@ -487,6 +494,8 @@ class TestReporter {
return new jest_junit_parser_1.JestJunitParser(options);
case 'mocha-json':
return new mocha_json_parser_1.MochaJsonParser(options);
+ case 'python-xunit':
+ return new python_xunit_parser_1.PythonXunitParser(options);
case 'rspec-json':
return new rspec_json_parser_1.RspecJsonParser(options);
case 'swift-xunit':
@@ -837,12 +846,12 @@ class DotnetNunitParser {
.map(suite => suite.$.name)
.join('.');
const groupName = suitesWithoutTheories[suitesWithoutTheories.length - 1].$.name;
- let existingSuite = result.find(existingSuite => existingSuite.name === suiteName);
+ let existingSuite = result.find(suite => suite.name === suiteName);
if (existingSuite === undefined) {
existingSuite = new test_results_1.TestSuiteResult(suiteName, []);
result.push(existingSuite);
}
- let existingGroup = existingSuite.groups.find(existingGroup => existingGroup.name === groupName);
+ let existingGroup = existingSuite.groups.find(group => group.name === groupName);
if (existingGroup === undefined) {
existingGroup = new test_results_1.TestGroupResult(groupName, []);
existingSuite.groups.push(existingGroup);
@@ -1657,6 +1666,26 @@ class MochaJsonParser {
exports.MochaJsonParser = MochaJsonParser;
+/***/ }),
+
+/***/ 6578:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PythonXunitParser = void 0;
+const java_junit_parser_1 = __nccwpck_require__(8342);
+class PythonXunitParser extends java_junit_parser_1.JavaJunitParser {
+ options;
+ constructor(options) {
+ super(options);
+ this.options = options;
+ }
+}
+exports.PythonXunitParser = PythonXunitParser;
+
+
/***/ }),
/***/ 9768:
@@ -1909,6 +1938,7 @@ var __importStar = (this && this.__importStar) || (function () {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DEFAULT_OPTIONS = void 0;
exports.getReport = getReport;
+exports.getBadge = getBadge;
const core = __importStar(__nccwpck_require__(7484));
const markdown_utils_1 = __nccwpck_require__(5129);
const node_utils_1 = __nccwpck_require__(5384);
@@ -1923,13 +1953,13 @@ exports.DEFAULT_OPTIONS = {
onlySummary: false,
useActionsSummary: true,
badgeTitle: 'tests',
- reportTitle: ''
+ reportTitle: '',
+ collapsed: 'auto'
};
-function getReport(results, options = exports.DEFAULT_OPTIONS) {
- core.info('Generating check run summary');
+function getReport(results, options = exports.DEFAULT_OPTIONS, shortSummary = '') {
applySort(results);
const opts = { ...options };
- let lines = renderReport(results, opts);
+ let lines = renderReport(results, opts, shortSummary);
let report = lines.join('\n');
if (getByteLength(report) <= getMaxReportLength(options)) {
return report;
@@ -1937,7 +1967,7 @@ function getReport(results, options = exports.DEFAULT_OPTIONS) {
if (opts.listTests === 'all') {
core.info("Test report summary is too big - setting 'listTests' to 'failed'");
opts.listTests = 'failed';
- lines = renderReport(results, opts);
+ lines = renderReport(results, opts, shortSummary);
report = lines.join('\n');
if (getByteLength(report) <= getMaxReportLength(options)) {
return report;
@@ -1984,12 +2014,15 @@ function applySort(results) {
function getByteLength(text) {
return Buffer.byteLength(text, 'utf8');
}
-function renderReport(results, options) {
+function renderReport(results, options, shortSummary) {
const sections = [];
const reportTitle = options.reportTitle.trim();
if (reportTitle) {
sections.push(`# ${reportTitle}`);
}
+ if (shortSummary) {
+ sections.push(`## ${shortSummary}`);
+ }
const badge = getReportBadge(results, options);
sections.push(badge);
const runs = getTestRunsReport(results, options);
@@ -2022,13 +2055,17 @@ function getBadge(passed, failed, skipped, options) {
color = 'yellow';
}
const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully';
- const uri = encodeURIComponent(`${options.badgeTitle}-${message}-${color}`);
- return ``;
+ const encodedBadgeTitle = encodeImgShieldsURIComponent(options.badgeTitle);
+ const encodedMessage = encodeImgShieldsURIComponent(message);
+ const encodedColor = encodeImgShieldsURIComponent(color);
+ return ``;
}
function getTestRunsReport(testRuns, options) {
const sections = [];
const totalFailed = testRuns.reduce((sum, tr) => sum + tr.failed, 0);
- if (totalFailed === 0) {
+ // Determine if report should be collapsed based on collapsed option
+ const shouldCollapse = options.collapsed === 'always' || (options.collapsed === 'auto' && totalFailed === 0);
+ if (shouldCollapse) {
sections.push(`Expand for details
`);
sections.push(` `);
}
@@ -2053,7 +2090,7 @@ function getTestRunsReport(testRuns, options) {
const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();
sections.push(...suitesReports);
}
- if (totalFailed === 0) {
+ if (shouldCollapse) {
sections.push(` `);
}
return sections;
@@ -2153,6 +2190,9 @@ function getResultIcon(result) {
return '';
}
}
+function encodeImgShieldsURIComponent(component) {
+ return encodeURIComponent(component).replace(/-/g, '--').replace(/_/g, '__');
+}
/***/ }),
@@ -27847,6 +27887,7 @@ module.exports = {
// Replace globs with equivalent patterns to reduce parsing time.
REPLACEMENTS: {
+ __proto__: null,
'***': '*',
'**/**': '**',
'**/**/**': '**'
@@ -30281,8 +30322,11 @@ function runParallel (tasks, cb) {
/***/ 2560:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-;(function (sax) { // wrapper for non-node envs
- sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+;(function (sax) {
+ // wrapper for non-node envs
+ sax.parser = function (strict, opt) {
+ return new SAXParser(strict, opt)
+ }
sax.SAXParser = SAXParser
sax.SAXStream = SAXStream
sax.createStream = createStream
@@ -30299,9 +30343,18 @@ function runParallel (tasks, cb) {
sax.MAX_BUFFER_LENGTH = 64 * 1024
var buffers = [
- 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
- 'procInstName', 'procInstBody', 'entity', 'attribName',
- 'attribValue', 'cdata', 'script'
+ 'comment',
+ 'sgmlDecl',
+ 'textNode',
+ 'tagName',
+ 'doctype',
+ 'procInstName',
+ 'procInstBody',
+ 'entity',
+ 'attribName',
+ 'attribValue',
+ 'cdata',
+ 'script',
]
sax.EVENTS = [
@@ -30322,10 +30375,10 @@ function runParallel (tasks, cb) {
'ready',
'script',
'opennamespace',
- 'closenamespace'
+ 'closenamespace',
]
- function SAXParser (strict, opt) {
+ function SAXParser(strict, opt) {
if (!(this instanceof SAXParser)) {
return new SAXParser(strict, opt)
}
@@ -30344,7 +30397,10 @@ function runParallel (tasks, cb) {
parser.noscript = !!(strict || parser.opt.noscript)
parser.state = S.BEGIN
parser.strictEntities = parser.opt.strictEntities
- parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
+ parser.ENTITIES =
+ parser.strictEntities ?
+ Object.create(sax.XML_ENTITIES)
+ : Object.create(sax.ENTITIES)
parser.attribList = []
// namespaces form a prototype chain.
@@ -30354,6 +30410,12 @@ function runParallel (tasks, cb) {
parser.ns = Object.create(rootNS)
}
+ // disallow unquoted attribute values if not otherwise configured
+ // and strict mode is true
+ if (parser.opt.unquotedAttributeValues === undefined) {
+ parser.opt.unquotedAttributeValues = !strict
+ }
+
// mostly just for error reporting
parser.trackPosition = parser.opt.position !== false
if (parser.trackPosition) {
@@ -30364,7 +30426,7 @@ function runParallel (tasks, cb) {
if (!Object.create) {
Object.create = function (o) {
- function F () {}
+ function F() {}
F.prototype = o
var newf = new F()
return newf
@@ -30379,7 +30441,7 @@ function runParallel (tasks, cb) {
}
}
- function checkBufferLength (parser) {
+ function checkBufferLength(parser) {
var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
var maxActual = 0
for (var i = 0, l = buffers.length; i < l; i++) {
@@ -30415,13 +30477,13 @@ function runParallel (tasks, cb) {
parser.bufferCheckPosition = m + parser.position
}
- function clearBuffers (parser) {
+ function clearBuffers(parser) {
for (var i = 0, l = buffers.length; i < l; i++) {
parser[buffers[i]] = ''
}
}
- function flushBuffers (parser) {
+ function flushBuffers(parser) {
closeText(parser)
if (parser.cdata !== '') {
emitNode(parser, 'oncdata', parser.cdata)
@@ -30434,11 +30496,20 @@ function runParallel (tasks, cb) {
}
SAXParser.prototype = {
- end: function () { end(this) },
+ end: function () {
+ end(this)
+ },
write: write,
- resume: function () { this.error = null; return this },
- close: function () { return this.write(null) },
- flush: function () { flushBuffers(this) }
+ resume: function () {
+ this.error = null
+ return this
+ },
+ close: function () {
+ return this.write(null)
+ },
+ flush: function () {
+ flushBuffers(this)
+ },
}
var Stream
@@ -30447,16 +30518,17 @@ function runParallel (tasks, cb) {
} catch (ex) {
Stream = function () {}
}
+ if (!Stream) Stream = function () {}
var streamWraps = sax.EVENTS.filter(function (ev) {
return ev !== 'error' && ev !== 'end'
})
- function createStream (strict, opt) {
+ function createStream(strict, opt) {
return new SAXStream(strict, opt)
}
- function SAXStream (strict, opt) {
+ function SAXStream(strict, opt) {
if (!(this instanceof SAXStream)) {
return new SAXStream(strict, opt)
}
@@ -30497,21 +30569,23 @@ function runParallel (tasks, cb) {
me.on(ev, h)
},
enumerable: true,
- configurable: false
+ configurable: false,
})
})
}
SAXStream.prototype = Object.create(Stream.prototype, {
constructor: {
- value: SAXStream
- }
+ value: SAXStream,
+ },
})
SAXStream.prototype.write = function (data) {
- if (typeof Buffer === 'function' &&
+ if (
+ typeof Buffer === 'function' &&
typeof Buffer.isBuffer === 'function' &&
- Buffer.isBuffer(data)) {
+ Buffer.isBuffer(data)
+ ) {
if (!this._decoder) {
var SD = (__nccwpck_require__(3193).StringDecoder)
this._decoder = new SD('utf8')
@@ -30536,7 +30610,10 @@ function runParallel (tasks, cb) {
var me = this
if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
me._parser['on' + ev] = function () {
- var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
+ var args =
+ arguments.length === 1 ?
+ [arguments[0]]
+ : Array.apply(null, arguments)
args.splice(0, 0, ev)
me.emit.apply(me, args)
}
@@ -30559,30 +30636,34 @@ function runParallel (tasks, cb) {
// without a significant breaking change to either this parser, or the
// JavaScript language. Implementation of an emoji-capable xml parser
// is left as an exercise for the reader.
- var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+ var nameStart =
+ /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
- var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+ var nameBody =
+ /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
- var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
- var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+ var entityStart =
+ /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+ var entityBody =
+ /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
- function isWhitespace (c) {
+ function isWhitespace(c) {
return c === ' ' || c === '\n' || c === '\r' || c === '\t'
}
- function isQuote (c) {
- return c === '"' || c === '\''
+ function isQuote(c) {
+ return c === '"' || c === "'"
}
- function isAttribEnd (c) {
+ function isAttribEnd(c) {
return c === '>' || isWhitespace(c)
}
- function isMatch (regex, c) {
+ function isMatch(regex, c) {
return regex.test(c)
}
- function notMatch (regex, c) {
+ function notMatch(regex, c) {
return !isMatch(regex, c)
}
@@ -30623,271 +30704,271 @@ function runParallel (tasks, cb) {
CLOSE_TAG: S++, //
SCRIPT: S++, //