Compare commits


26 commits
v2.2.0 ... main

Author SHA1 Message Date
Jozef Izso
ee446707ff
Merge pull request #692 from dorny/release/v2.3.0
2025-11-30 01:52:48 +01:00
Jozef Izso
fe45e95373
test-reporter release v2.3.0 2025-11-30 01:49:30 +01:00
Jozef Izso
e40a1da745
Merge pull request #682 from dorny/dependabot/npm_and_yarn/reports/mocha/multi-f14266366f 2025-11-30 01:01:42 +01:00
dependabot[bot]
3445860437
Bump js-yaml and mocha in /reports/mocha
Bumps [js-yaml](https://github.com/nodeca/js-yaml) to 4.1.1 and updates ancestor dependency [mocha](https://github.com/mochajs/mocha). These dependencies need to be updated together.


Updates `js-yaml` from 4.0.0 to 4.1.1
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/4.0.0...4.1.1)

Updates `mocha` from 8.3.0 to 11.7.5
- [Release notes](https://github.com/mochajs/mocha/releases)
- [Changelog](https://github.com/mochajs/mocha/blob/v11.7.5/CHANGELOG.md)
- [Commits](https://github.com/mochajs/mocha/compare/v8.3.0...v11.7.5)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 4.1.1
  dependency-type: indirect
- dependency-name: mocha
  dependency-version: 11.7.5
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-29 23:45:48 +00:00
Jozef Izso
9ef5c136b2
Merge pull request #691 from dorny/fix/complete-documentation 2025-11-30 00:40:18 +01:00
Jozef Izso
83e20c1534
Merge pull request #685 from dorny/dependabot/npm_and_yarn/reports/jest/js-yaml-3.14.2 2025-11-30 00:37:29 +01:00
Jozef Izso
4331a3b620
Clarify the dotnet-nunit docs to require NUnit3TestAdapter for nunit logger 2025-11-23 15:26:03 +01:00
Jozef Izso
04232af26f
Complete documentation for all supported reporters
This commit addresses several documentation gaps to ensure all implemented
reporters are properly documented across action.yml and README.md.

Changes:
1. Updated action.yml description to include all supported languages:
   - Added: Go, Python (pytest, unittest), Ruby (RSpec), Swift

2. Added Ruby/RSpec to supported languages list in README.md

3. Added detailed documentation sections in README.md:
   - dotnet-nunit: Added section with NUnit3 XML format instructions
   - rspec-json: Added section with RSpec JSON formatter configuration

All reporters now have:
- Entry in action.yml description
- Entry in README supported languages list
- Entry in README usage documentation (reporter input)
- Detailed documentation section in README "Supported formats"
- Implementation in src/main.ts
- Tests in __tests__/

This ensures users can discover and use all available reporters without
confusion about what is supported.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-22 18:05:33 +01:00
Jozef Izso
cf146f4036
Merge pull request #690 from dorny/fix/add-golang-json-to-action-yml
2025-11-22 17:50:03 +01:00
Jozef Izso
33fc27cf09
Merge pull request #687 from dorny/dependabot/github_actions/actions/checkout-6 2025-11-22 17:49:02 +01:00
Jozef Izso
8fd5fc58ca
Add missing golang-json reporter to action.yml
The golang-json reporter has been fully implemented since earlier versions
but was missing from the action.yml documentation. This made it undiscoverable
for users looking for Go test support.

Changes:
- Added golang-json to the list of supported reporters in action.yml

This aligns the action.yml with:
- The actual implementation in src/main.ts (lines 264-265)
- The README.md documentation (line 145)
- The existing parser and tests

Fixes #689

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-22 17:47:11 +01:00
dependabot[bot]
fc80cb4400
Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-21 23:07:16 +00:00
dependabot[bot]
79ea6a9d0e
Bump js-yaml from 3.14.0 to 3.14.2 in /reports/jest
Bumps [js-yaml](https://github.com/nodeca/js-yaml) from 3.14.0 to 3.14.2.
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/3.14.0...3.14.2)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 3.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-18 19:47:48 +00:00
Jozef Izso
aef3d726a6
Merge pull request #683 from micmarc/feature/python-pytest
2025-11-15 18:19:24 +01:00
Michael Marcus
c1a56edcfe Enhance pytest support
Add robust test schema for pytest report
Update README with sample pytest command
2025-11-15 11:55:41 -05:00
Jozef Izso
3b9dad208e
Merge pull request #681 from phactum-mnestler/main
Update sax.js to fix large XML file parsing #681
2025-11-15 11:24:15 +01:00
Jozef Izso
7c636a991c
Merge pull request #643 from micmarc/feature/python-support 2025-11-15 11:12:45 +01:00
Michael Nestler
cfce4bda71 Add saxjs to version overrides 2025-11-15 11:07:56 +01:00
Michael Marcus
fe87682515 Improve testing with robust schema for unittest report 2025-11-14 21:59:25 -05:00
Michael Marcus
9b8d3b002e Python support
Add python-xunit-parser.ts with associated case statement
Add python-xunit to reporter docs in action.yml
Add tests
Update README

Resolves #244
Resolves #633
2025-11-14 16:29:58 -05:00
Jozef Izso
e2f0ff6339
Merge pull request #645 from micmarc/fix/report-title-short-summary
2025-11-14 20:00:35 +01:00
Jozef Izso
bc8c29617e
test-reporter release v2.2.0
Merge pull request #679 from dorny/release/v2.2.0
2025-11-14 18:46:03 +01:00
Michael Marcus
9aef9d168f Remove info log 2025-11-14 12:01:42 -05:00
Michael Marcus
6b64465c34 Rebuild index.js after rebase from main 2025-11-14 11:59:46 -05:00
Michael Marcus
6617053f9c Fix short summary formatting when a report title is present 2025-11-14 11:58:16 -05:00
Michael Nestler
43a747d94c Update sax.js to fix large XML file parsing 2025-11-14 16:06:35 +01:00
24 changed files with 5121 additions and 2738 deletions

View file

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Set Node.js
uses: actions/setup-node@v6

View file

@@ -13,7 +13,7 @@ jobs:
name: Build & Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'

View file

@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- run: npm ci
- run: npm run build
- run: npm test

View file

@@ -11,7 +11,7 @@ jobs:
name: Workflow test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: ./
with:
artifact: test-results

View file

@@ -1,5 +1,12 @@
# Changelog
## 2.3.0
* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
## 2.2.0
* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
* Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672

View file

@@ -19,6 +19,8 @@ This [Github Action](https://github.com/features/actions) displays test results
- Go / [go test](https://pkg.go.dev/testing)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit
For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +147,9 @@ jobs:
# java-junit
# jest-junit
# mocha-json
# python-xunit
# rspec-json
# swift-xunit
reporter: ''
# Allows you to generate only the summary.
@@ -253,6 +257,20 @@ Supported testing frameworks:
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
<details>
<summary>dotnet-nunit</summary>
Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
`dotnet test --logger "nunit;LogFileName=test-results.xml"`
Supported testing frameworks:
- [NUnit](https://nunit.org/)
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
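As an illustrative sketch only (not part of this diff), the resulting NUnit3 XML can be fed to this action from a workflow step like the one below; the result path glob and the `@v2` version tag are assumptions:

```yaml
# Sketch: run NUnit tests via dotnet test and publish the NUnit3 XML results.
# The glob pattern and the action version tag are assumptions, not taken from this diff.
- run: dotnet test --logger "nunit;LogFileName=test-results.xml"
- uses: dorny/test-reporter@v2
  if: success() || failure()        # publish the report even when tests fail
  with:
    name: NUnit Tests
    path: '**/test-results.xml'     # files written by the nunit logger
    reporter: dotnet-nunit
```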
<details>
<summary>flutter-json</summary>
@@ -349,6 +367,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
</details>
<details>
<summary>python-xunit (Experimental)</summary>
Support for Python test results in xUnit format is experimental: it should work, but it has not been extensively tested.
For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
```shell
pytest --junit-xml=test-report.xml
```
For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
</details>
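For illustration (not part of this diff), a minimal workflow job consuming the pytest command above might look like the following sketch; the runner image, install step, and `@v2` version tag are assumptions:

```yaml
# Sketch: run pytest with JUnit XML output and publish it with the new python-xunit reporter.
# Runner image, pip install step, and action version tag are assumptions.
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: python3 -m pip install pytest
      - run: python3 -m pytest --junit-xml=test-report.xml
      - uses: dorny/test-reporter@v2
        if: success() || failure()        # report results even when tests fail
        with:
          name: pytest
          path: test-report.xml           # matches the --junit-xml path above
          reporter: python-xunit
```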
<details>
<summary>rspec-json</summary>
Support for the [RSpec](https://rspec.info/) testing framework requires the use of the JSON formatter.
You can configure RSpec to output JSON by using the `--format json` option and writing the results to a file with `--out`:
```shell
rspec --format json --out rspec-results.json
```
Or configure it in `.rspec` file:
```
--format json
--out rspec-results.json
```
For more information see:
- [RSpec documentation](https://rspec.info/)
- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
</details>
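Again as a sketch only (not part of this diff), the JSON file produced by the `.rspec` configuration above could be published like this; the bundler invocation and the `@v2` version tag are assumptions:

```yaml
# Sketch: run RSpec (writing rspec-results.json per the .rspec config above) and publish it.
# The bundler invocation and action version tag are assumptions.
- run: bundle exec rspec
- uses: dorny/test-reporter@v2
  if: success() || failure()        # publish the report even when tests fail
  with:
    name: RSpec
    path: rspec-results.json        # matches the --out option above
    reporter: rspec-json
```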
<details>
<summary>swift-xunit (Experimental)</summary>

View file

@@ -0,0 +1,26 @@
![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```

View file

@@ -0,0 +1,23 @@
![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
AssertionError: failed
❌ test_error
Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```

View file

@@ -0,0 +1,192 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-pytest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "tests.test_lib",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 5,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param1]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param2]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_always_fail():
> assert False
E assert False
tests/test_lib.py:25: AssertionError
",
"line": undefined,
"message": "assert False",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_error():
> raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_record_property",
"result": "success",
"time": 0,
},
],
},
TestGroupResult {
"name": "custom_classname",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_with_record_xml_attribute",
"result": "success",
"time": 0,
},
],
},
],
"name": "pytest",
"totalTime": 19,
},
],
"totalTime": undefined,
}
`;
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-unittest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "TestAcme",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_0_param1",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_1_param2",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
",
"line": undefined,
"message": "AssertionError: failed",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
],
},
],
"name": "TestAcme-20251114214921",
"totalTime": 1,
},
],
"totalTime": 1,
}
`;

View file

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
&gt; assert False
E assert False
tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
&gt; raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>

View file

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
<testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
<testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
<testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
<failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
]]></failure>
</testcase>
<testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
<error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
]]></error>
</testcase>
<testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
<skipped type="skip" message="skipped"/>
</testcase>
<testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
<skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
</testcase>
</testsuite>

View file

@@ -303,4 +303,47 @@ describe('jest-junit tests', () => {
expect(report).not.toContain('<details><summary>Expand for details</summary>')
expect(report).not.toContain('</details>')
})
it('report includes the short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
// Report should have the title as the first line
expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
})
it('report includes a custom report title and short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport(
[result],
{
...DEFAULT_OPTIONS,
reportTitle: 'My Custom Title'
},
shortSummary
)
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
})
})

View file

@@ -0,0 +1,93 @@
import * as fs from 'fs'
import * as path from 'path'
import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'
const defaultOpts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
describe('python-xunit unittest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report does not include a title by default', async () => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result])
// Report should have the badge as the first line
expect(report).toMatch(/^!\[Tests failed]/)
})
it.each([
['empty string', ''],
['space', ' '],
['tab', '\t'],
['newline', '\n']
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
reportTitle
})
// Report should have the badge as the first line
expect(report).toMatch(/^!\[Tests failed]/)
})
it('report includes a custom report title', async () => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
reportTitle: 'My Custom Title'
})
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n/)
})
})
describe('python-xunit pytest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

View file

@@ -1,6 +1,5 @@
name: Test Reporter
description: |
Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
description: Displays test results from popular testing frameworks directly in GitHub
author: Michal Dorner <dorner.michal@gmail.com>
inputs:
artifact:
@@ -29,9 +28,11 @@ inputs:
- dotnet-nunit
- dotnet-trx
- flutter-json
- golang-json
- java-junit
- jest-junit
- mocha-json
- python-xunit
- rspec-json
- swift-xunit
required: true

dist/index.js (generated, vendored): 908 changes

File diff suppressed because it is too large

dist/licenses.txt (generated, vendored): 80 changes
View file

@@ -1350,48 +1350,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
sax
ISC
The ISC License
BlueOak-1.0.0
# Blue Oak Model License
Copyright (c) Isaac Z. Schlueter and Contributors
Version 1.0.0
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
## Purpose
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
This license gives everyone as much permission to work with
this software as possible, while protecting contributors
from liability.
====
## Acceptance
`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
License, as follows:
In order to receive this license, you must agree to its
rules. The rules of this license are both obligations
under that agreement and conditions to your license.
You must not do anything with this software that triggers
a rule that you cannot or will not follow.
Copyright Mathias Bynens <https://mathiasbynens.be/>
## Copyright
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
Each contributor licenses you to do everything with this
software that would otherwise infringe that contributor's
copyright in it.
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
## Notices
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
You must ensure that everyone who gets a copy of
any part of this software from you, with or without
changes, also gets the text of this license or a link to
<https://blueoakcouncil.org/license/1.0.0>.
## Excuse
If anyone notifies you in writing that you have not
complied with [Notices](#notices), you can keep your
license by taking all practical steps to comply within 30
days after the notice. If you do not do so, your license
ends immediately.
## Patent
Each contributor licenses you to do everything with this
software that would otherwise infringe any patent claims
they can license or become able to license.
## Reliability
No contributor can revoke this license.
## No Liability
***As far as the law allows, this software comes as is,
without any warranty or condition, and no contributor
will be liable to anyone for any damages related to this
software or this license, under any kind of legal claim.***
to-regex-range

package-lock.json (generated): 11 changes
View file

@@ -1,12 +1,12 @@
{
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.11.1",
@@ -7580,9 +7580,10 @@
}
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.4.3.tgz",
"integrity": "sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==",
"license": "BlueOak-1.0.0"
},
"node_modules/semver": {
"version": "7.7.3",

View file

@@ -1,6 +1,6 @@
{
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"private": true,
"description": "Presents test results from popular testing frameworks as Github check run",
"main": "lib/main.js",
@@ -69,6 +69,9 @@
"ts-jest": "^29.4.5",
"typescript": "^5.9.3"
},
"overrides": {
"sax": "^1.4.3"
},
"jest-junit": {
"suiteName": "jest tests",
"outputDirectory": "__tests__/__results__",

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -9,6 +9,6 @@
"author": "Michal Dorner <dorner.michal@gmail.com>",
"license": "MIT",
"devDependencies": {
"mocha": "^8.3.0"
"mocha": "^11.7.5"
}
}

View file

@@ -17,9 +17,9 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
import {getCheckRunContext} from './utils/github-utils'
@@ -181,20 +181,23 @@ class TestReporter {
let baseUrl = ''
if (this.useActionsSummary) {
const summary = getReport(results, {
listSuites,
listTests,
baseUrl,
onlySummary,
useActionsSummary,
badgeTitle,
reportTitle,
collapsed
})
const summary = getReport(
results,
{
listSuites,
listTests,
baseUrl,
onlySummary,
useActionsSummary,
badgeTitle,
reportTitle,
collapsed
},
shortSummary
)
core.info('Summary content:')
core.info(summary)
core.summary.addRaw(`# ${shortSummary}`)
await core.summary.addRaw(summary).write()
} else {
core.info(`Creating check run ${name}`)
@@ -268,6 +271,8 @@ class TestReporter {
return new JestJunitParser(options)
case 'mocha-json':
return new MochaJsonParser(options)
case 'python-xunit':
return new PythonXunitParser(options)
case 'rspec-json':
return new RspecJsonParser(options)
case 'swift-xunit':

View file

@@ -0,0 +1,8 @@
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'
export class PythonXunitParser extends JavaJunitParser {
constructor(readonly options: ParseOptions) {
super(options)
}
}

View file

@@ -30,13 +30,15 @@ export const DEFAULT_OPTIONS: ReportOptions = {
collapsed: 'auto'
}
export function getReport(results: TestRunResult[], options: ReportOptions = DEFAULT_OPTIONS): string {
core.info('Generating check run summary')
export function getReport(
results: TestRunResult[],
options: ReportOptions = DEFAULT_OPTIONS,
shortSummary = ''
): string {
applySort(results)
const opts = {...options}
let lines = renderReport(results, opts)
let lines = renderReport(results, opts, shortSummary)
let report = lines.join('\n')
if (getByteLength(report) <= getMaxReportLength(options)) {
@@ -46,7 +48,7 @@ export function getReport(results: TestRunResult[], options: ReportOptions = DEF
if (opts.listTests === 'all') {
core.info("Test report summary is too big - setting 'listTests' to 'failed'")
opts.listTests = 'failed'
lines = renderReport(results, opts)
lines = renderReport(results, opts, shortSummary)
report = lines.join('\n')
if (getByteLength(report) <= getMaxReportLength(options)) {
return report
@@ -103,7 +105,7 @@ function getByteLength(text: string): number {
return Buffer.byteLength(text, 'utf8')
}
function renderReport(results: TestRunResult[], options: ReportOptions): string[] {
function renderReport(results: TestRunResult[], options: ReportOptions, shortSummary: string): string[] {
const sections: string[] = []
const reportTitle: string = options.reportTitle.trim()
@@ -111,6 +113,10 @@ function renderReport(results: TestRunResult[], options: ReportOptions): string[
sections.push(`# ${reportTitle}`)
}
if (shortSummary) {
sections.push(`## ${shortSummary}`)
}
const badge = getReportBadge(results, options)
sections.push(badge)