Mirror of https://github.com/dorny/test-reporter.git
Synced 2025-12-15 13:57:09 +01:00

Compare commits (64 commits)
| SHA1 |
|---|
| ee446707ff |
| fe45e95373 |
| e40a1da745 |
| 3445860437 |
| 9ef5c136b2 |
| 83e20c1534 |
| 4331a3b620 |
| 04232af26f |
| cf146f4036 |
| 33fc27cf09 |
| 8fd5fc58ca |
| fc80cb4400 |
| 79ea6a9d0e |
| aef3d726a6 |
| c1a56edcfe |
| 3b9dad208e |
| 7c636a991c |
| cfce4bda71 |
| fe87682515 |
| 9b8d3b002e |
| e2f0ff6339 |
| bc8c29617e |
| 9aef9d168f |
| 6b64465c34 |
| 6617053f9c |
| 43a747d94c |
| 7b7927aa7d |
| eeac280b8e |
| 6939db53fb |
| b3812e0f5b |
| cd299561e7 |
| c7935221e6 |
| 5fb0582760 |
| 7148297f02 |
| 828632acd0 |
| 4a41472ca4 |
| 22dc7b52f4 |
| bed521d765 |
| 6079ce3d17 |
| de77f76b7e |
| c883ae9738 |
| 35be98f7e7 |
| f372a8338e |
| 948dd03d7b |
| cf9db500ed |
| ba33405987 |
| 34d8269ede |
| fd1c798d8d |
| 2211cf1035 |
| be3721d54a |
| d171d89cd4 |
| 661decd3af |
| bd9e36bf0c |
| 9642942c97 |
| aa953f36f9 |
| f686ce916a |
| b14337a039 |
| ec1e910416 |
| 353a438514 |
| 9c4a54379f |
| 07e5c648b5 |
| 4d84da17a1 |
| 1c33c4c823 |
| eea8b67eb1 |
36 changed files with 7140 additions and 4129 deletions
**.github/workflows/check-dist.yml** (vendored, 6 changed lines)

```diff
@@ -21,10 +21,10 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6

       - name: Set Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'
@@ -46,7 +46,7 @@ jobs:
         id: diff

       # If index.js was different than expected, upload the expected version as an artifact
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
         if: ${{ failure() && steps.diff.conclusion == 'failure' }}
         with:
           name: dist
```
**.github/workflows/ci.yml** (vendored, 6 changed lines)

```diff
@@ -13,8 +13,8 @@ jobs:
     name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v6
+      - uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'
       - run: npm ci
@@ -25,7 +25,7 @@ jobs:

       - name: Upload test results
        if: ${{ !cancelled() }}
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: test-results
           path: __tests__/__results__/*.xml
```
**.github/workflows/manual-run.yml** (vendored, 2 changed lines)

```diff
@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
       - run: npm ci
       - run: npm run build
       - run: npm test
```
**.github/workflows/test-report.yml** (vendored, 2 changed lines)

```diff
@@ -11,7 +11,7 @@ jobs:
     name: Workflow test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
       - uses: ./
         with:
           artifact: test-results
```
**CHANGELOG.md** (13 changed lines)

```diff
@@ -1,5 +1,18 @@
 # Changelog

+## 2.3.0
+* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
+* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
+* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
+* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
+* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
+
 ## 2.2.0
 * Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
 * Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672
 * Fix missing `report-title` attribute in action definition https://github.com/dorny/test-reporter/pull/637
 * Refactor variable names to fix shadowing issues https://github.com/dorny/test-reporter/pull/630

 ## 2.1.1
 * Fix error when a TestMethod element does not have a className attribute in a trx file https://github.com/dorny/test-reporter/pull/623
 * Add stack trace from trx to summary https://github.com/dorny/test-reporter/pull/615
```
**README.md** (53 changed lines)

````diff
@@ -19,6 +19,8 @@ This [Github Action](https://github.com/features/actions) displays test results
 - Go / [go test](https://pkg.go.dev/testing)
 - Java / [JUnit](https://junit.org/)
 - JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
+- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
+- Ruby / [RSpec](https://rspec.info/)
 - Swift / xUnit

 For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +147,9 @@ jobs:
           # java-junit
           # jest-junit
           # mocha-json
+          # python-xunit
+          # rspec-json
           # swift-xunit
           reporter: ''

           # Allows you to generate only the summary.
@@ -253,6 +257,20 @@ Supported testing frameworks:
 For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
 </details>

+<details>
+  <summary>dotnet-nunit</summary>
+
+Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
+Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
+
+`dotnet test --logger "nunit;LogFileName=test-results.xml"`
+
+Supported testing frameworks:
+- [NUnit](https://nunit.org/)
+
+For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
+</details>
+
 <details>
   <summary>flutter-json</summary>

@@ -349,6 +367,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
 Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
 </details>

+<details>
+  <summary>python-xunit (Experimental)</summary>
+
+Support for Python test results in xUnit format is experimental: it should work, but it has not been extensively tested.
+
+For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
+
+```shell
+pytest --junit-xml=test-report.xml
+```
+
+For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
+</details>
+
+<details>
+  <summary>rspec-json</summary>
+
+[RSpec](https://rspec.info/) testing framework support requires the use of the JSON formatter.
+You can configure RSpec to output JSON by using the `--format json` option and redirecting to a file:
+
+```shell
+rspec --format json --out rspec-results.json
+```
+
+Or configure it in the `.rspec` file:
+```
+--format json
+--out rspec-results.json
+```
+
+For more information see:
+- [RSpec documentation](https://rspec.info/)
+- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
+</details>
+
 <details>
   <summary>swift-xunit (Experimental)</summary>

````
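For the unittest route described above, a minimal sketch of producing JUnit XML with the `unittest-xml-reporting` package (the `-s tests` source directory and `test-results` output directory are illustrative, not from this repository):

```shell
pip install unittest-xml-reporting
# xmlrunner accepts the usual unittest discovery arguments; -o sets the directory for the JUnit XML files
python -m xmlrunner discover -s tests -o test-results
```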
**__tests__/__outputs__/python-xunit-pytest.md** (new file, 26 lines)

````markdown
![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
	assert False
⚪ test_expected_failure
❌ test_error
	Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```
````
**__tests__/__outputs__/python-xunit-unittest.md** (new file, 23 lines)

````markdown
![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
	AssertionError: failed
❌ test_error
	Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```
````
**__tests__/__snapshots__/dart-json.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`dart-json tests matches report snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/dotnet-nunit.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`dotnet-nunit tests report from ./reports/dotnet test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/dotnet-trx.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`dotnet-trx tests matches dotnet-trx report snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/golang-json.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`golang-json tests report from ./reports/dotnet test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/java-junit.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/jest-junit.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`jest-junit tests parsing ESLint report without timing information works - PR #134 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/mocha-json.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/python-xunit.test.ts.snap** (new file, 192 lines)

```
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
  "path": "fixtures/python-xunit-pytest.xml",
  "suites": [
    TestSuiteResult {
      "groups": [
        TestGroupResult {
          "name": "tests.test_lib",
          "tests": [
            TestCaseResult {
              "error": undefined,
              "name": "test_always_pass",
              "result": "success",
              "time": 2,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_with_subtests",
              "result": "success",
              "time": 5,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized[param1]",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized[param2]",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_always_skip",
              "result": "skipped",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "def test_always_fail():
> assert False
E assert False

tests/test_lib.py:25: AssertionError
",
                "line": undefined,
                "message": "assert False",
                "path": undefined,
              },
              "name": "test_always_fail",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_expected_failure",
              "result": "skipped",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "def test_error():
> raise Exception("error")
E Exception: error

tests/test_lib.py:32: Exception
",
                "line": undefined,
                "message": "Exception: error",
                "path": undefined,
              },
              "name": "test_error",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_with_record_property",
              "result": "success",
              "time": 0,
            },
          ],
        },
        TestGroupResult {
          "name": "custom_classname",
          "tests": [
            TestCaseResult {
              "error": undefined,
              "name": "test_with_record_xml_attribute",
              "result": "success",
              "time": 0,
            },
          ],
        },
      ],
      "name": "pytest",
      "totalTime": 19,
    },
  ],
  "totalTime": undefined,
}
`;

exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
  "path": "fixtures/python-xunit-unittest.xml",
  "suites": [
    TestSuiteResult {
      "groups": [
        TestGroupResult {
          "name": "TestAcme",
          "tests": [
            TestCaseResult {
              "error": undefined,
              "name": "test_always_pass",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized_0_param1",
              "result": "success",
              "time": 1,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized_1_param2",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_with_subtests",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
",
                "line": undefined,
                "message": "AssertionError: failed",
                "path": undefined,
              },
              "name": "test_always_fail",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
",
                "line": undefined,
                "message": "Exception: error",
                "path": undefined,
              },
              "name": "test_error",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_always_skip",
              "result": "skipped",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_expected_failure",
              "result": "skipped",
              "time": 0,
            },
          ],
        },
      ],
      "name": "TestAcme-20251114214921",
      "totalTime": 1,
    },
  ],
  "totalTime": 1,
}
`;
```
**__tests__/__snapshots__/rspec-json.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/__snapshots__/swift-xunit.test.ts.snap**

```diff
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
 TestRunResult {
```
**__tests__/fixtures/python-xunit-pytest.xml** (new file, 42 lines)

```xml
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
  <testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
    timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
    <properties>
      <property name="custom_prop" value="custom_val"/>
    </properties>
    <testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
    <testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
    <testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
    <testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
    <testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
      <skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
      </skipped>
    </testcase>
    <testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
      <failure message="assert False">def test_always_fail():
> assert False
E assert False

tests/test_lib.py:25: AssertionError
      </failure>
    </testcase>
    <testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
      <skipped type="pytest.xfail" message=""/>
    </testcase>
    <testcase classname="tests.test_lib" name="test_error" time="0.000">
      <failure message="Exception: error">def test_error():
> raise Exception("error")
E Exception: error

tests/test_lib.py:32: Exception
      </failure>
    </testcase>
    <testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
      <properties>
        <property name="example_key" value="1"/>
      </properties>
    </testcase>
    <testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
  </testsuite>
</testsuites>
```
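For orientation, part of the `tests/test_lib.py` that produces fixtures like these can be read back from the embedded failure texts. A minimal sketch follows; the failing and erroring bodies come straight from the tracebacks above, while the skip reason, xfail marker, and `record_xml_attribute` usage are assumptions consistent with the fixture:

```python
import pytest

def test_always_pass():
    assert True

def test_always_fail():
    assert False  # appears in the fixture as failure message "assert False"

def test_error():
    raise Exception("error")  # appears as failure message "Exception: error"

@pytest.mark.skip(reason="skipped")  # assumed; fixture records skip message "skipped"
def test_always_skip():
    pass

@pytest.mark.xfail  # assumed; fixture records <skipped type="pytest.xfail">
def test_expected_failure():
    assert False

def test_with_record_xml_attribute(record_xml_attribute):
    # pytest's record_xml_attribute fixture overrides the classname
    # attribute in the JUnit XML, yielding the "custom_classname" group
    record_xml_attribute("classname", "custom_classname")
```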
**__tests__/fixtures/python-xunit-unittest.xml** (new file, 27 lines)

```xml
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
  <testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
  <testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
  <testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
  <testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
  <testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
    <failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
]]></failure>
  </testcase>
  <testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
    <error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
]]></error>
  </testcase>
  <testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
    <skipped type="skip" message="skipped"/>
  </testcase>
  <testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
    <skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
  </testcase>
</testsuite>
```
**__tests__/jest-junit.test.ts** (hunk `@@ -207,4 +207,143 @@ describe('jest-junit tests', () => {`; the following test cases are added after the existing custom-title test, before the closing `})` of the describe block)

```ts
  it('report can be collapsed when configured', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      collapsed: 'always'
    })
    // Report should include collapsible details
    expect(report).toContain('<details><summary>Expand for details</summary>')
    expect(report).toContain('</details>')
  })

  it('report is not collapsed when configured to never', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      collapsed: 'never'
    })
    // Report should not include collapsible details
    expect(report).not.toContain('<details><summary>Expand for details</summary>')
    expect(report).not.toContain('</details>')
  })

  it('report auto-collapses when all tests pass', async () => {
    // Test with a fixture that has all passing tests (no failures)
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)

    // Verify this fixture has no failures
    expect(result.failed).toBe(0)

    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      collapsed: 'auto'
    })

    // Should collapse when all tests pass
    expect(report).toContain('<details><summary>Expand for details</summary>')
    expect(report).toContain('</details>')
  })

  it('report does not auto-collapse when tests fail', async () => {
    // Test with a fixture that has failing tests
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)

    // Verify this fixture has failures
    expect(result.failed).toBeGreaterThan(0)

    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      collapsed: 'auto'
    })

    // Should not collapse when there are failures
    expect(report).not.toContain('<details><summary>Expand for details</summary>')
    expect(report).not.toContain('</details>')
  })

  it('report includes the short summary', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    const shortSummary = '1 passed, 4 failed and 1 skipped'
    const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
    // Report should have the short summary as the first line
    expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
  })

  it('report includes a custom report title and short summary', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: []
    }

    const parser = new JestJunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    const shortSummary = '1 passed, 4 failed and 1 skipped'
    const report = getReport(
      [result],
      {
        ...DEFAULT_OPTIONS,
        reportTitle: 'My Custom Title'
      },
      shortSummary
    )
    // Report should have the title as the first line
    expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
  })
```
**__tests__/python-xunit.test.ts** (new file, 93 lines)

```ts
import * as fs from 'fs'
import * as path from 'path'

import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'

const defaultOpts: ParseOptions = {
  parseErrors: true,
  trackedFiles: []
}

describe('python-xunit unittest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')

  it('report from python test results matches snapshot', async () => {
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }

    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })

  it('report does not include a title by default', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result])
    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it.each([
    ['empty string', ''],
    ['space', ' '],
    ['tab', '\t'],
    ['newline', '\n']
  ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle
    })
    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it('report includes a custom report title', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle: 'My Custom Title'
    })
    // Report should have the title as the first line
    expect(report).toMatch(/^# My Custom Title\n/)
  })
})

describe('python-xunit pytest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')

  it('report from python test results matches snapshot', async () => {
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }

    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
})
```
**__tests__/report/get-report.test.ts** (new file, 120 lines; the expected badge strings below follow deterministically from `getBadge` and `encodeImgShieldsURIComponent` in src/report/get-report.ts)

```ts
import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'

describe('getBadge', () => {
  describe('URI encoding with special characters', () => {
    it('generates correct URI with simple badge title', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'tests'
      }
      const badge = getBadge(5, 0, 1, options)
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-5%20passed%2C%201%20skipped-success)')
    })

    it('handles badge title with single hyphen', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'unit-tests'
      }
      const badge = getBadge(3, 0, 0, options)
      // The hyphen in the badge title should be encoded as --
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/unit--tests-3%20passed-success)')
    })

    it('handles badge title with multiple hyphens', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'integration-api-tests'
      }
      const badge = getBadge(10, 0, 0, options)
      // All hyphens in the title should be encoded as --
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/integration--api--tests-10%20passed-success)')
    })

    it('handles badge title with multiple underscores', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'my_integration_test'
      }
      const badge = getBadge(10, 0, 0, options)
      // All underscores in the title should be encoded as __
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/my__integration__test-10%20passed-success)')
    })

    it('handles badge title with version format containing hyphen', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'MariaDb 12.0-ubi database tests'
      }
      const badge = getBadge(1, 0, 0, options)
      // The hyphen in "12.0-ubi" should be encoded as --
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/MariaDb%2012.0--ubi%20database%20tests-1%20passed-success)')
    })

    it('handles badge title with dots and hyphens', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'v1.2.3-beta-test'
      }
      const badge = getBadge(4, 1, 0, options)
      expect(badge).toBe('![Tests failed](https://img.shields.io/badge/v1.2.3--beta--test-4%20passed%2C%201%20failed-critical)')
    })

    it('preserves structural hyphens between label and message', () => {
      const options: ReportOptions = {
        ...DEFAULT_OPTIONS,
        badgeTitle: 'test-suite'
      }
      const badge = getBadge(2, 3, 1, options)
      // The URI should have literal hyphens separating title-message-color
      expect(badge).toBe('![Tests failed](https://img.shields.io/badge/test--suite-2%20passed%2C%203%20failed%2C%201%20skipped-critical)')
    })
  })

  describe('generates test outcome as color name for imgshields', () => {
    it('uses success color when all tests pass', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(5, 0, 0, options)
      expect(badge).toContain('-success)')
    })

    it('uses critical color when tests fail', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(5, 2, 0, options)
      expect(badge).toContain('-critical)')
    })

    it('uses yellow color when no tests found', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(0, 0, 0, options)
      expect(badge).toContain('-yellow)')
    })
  })

  describe('badge message composition', () => {
    it('includes only passed count when no failures or skips', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(5, 0, 0, options)
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-5%20passed-success)')
    })

    it('includes passed and failed counts', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(5, 2, 0, options)
      expect(badge).toBe('![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed-critical)')
    })

    it('includes passed, failed and skipped counts', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(5, 2, 1, options)
      expect(badge).toBe('![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)')
    })

    it('uses "none" message when no tests', () => {
      const options: ReportOptions = {...DEFAULT_OPTIONS}
      const badge = getBadge(0, 0, 0, options)
      expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-none-yellow)')
    })
  })
})
```
```diff
@@ -32,6 +32,6 @@ describe('parseNetDuration', () => {
   })

   it('throws when string has invalid format', () => {
-    expect(() => parseNetDuration('12:34:56 not a duration')).toThrowError(/^Invalid format/)
+    expect(() => parseNetDuration('12:34:56 not a duration')).toThrow(/^Invalid format/)
   })
 })
```
**action.yml** (17 changed lines)

```diff
@@ -1,6 +1,5 @@
 name: Test Reporter
-description: |
-  Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
+description: Displays test results from popular testing frameworks directly in GitHub
 author: Michal Dorner <dorner.michal@gmail.com>
 inputs:
   artifact:
@@ -29,9 +28,11 @@ inputs:
       - dotnet-nunit
       - dotnet-trx
       - flutter-json
       - golang-json
       - java-junit
       - jest-junit
       - mocha-json
+      - python-xunit
+      - rspec-json
       - swift-xunit
     required: true
@@ -68,6 +69,10 @@ inputs:
   working-directory:
     description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
     required: false
+  report-title:
+    description: Title for the test report summary
+    required: false
+    default: ''
   only-summary:
     description: |
       Allows you to generate only the summary.
@@ -85,6 +90,14 @@ inputs:
     description: Customize badge title
     required: false
     default: 'tests'
+  collapsed:
+    description: |
+      Controls whether test report details are collapsed or expanded. Supported options:
+      - auto: Collapse only if all tests pass (default behavior)
+      - always: Always collapse the report details
+      - never: Always expand the report details
+    required: false
+    default: 'auto'
   token:
     description: GitHub Access Token
     required: false
```
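Putting the new inputs together, a workflow step using this release might look like the following sketch (the step `name`, report `name`, and `path` values are illustrative and not taken from this diff):

```yaml
- name: Test report
  uses: dorny/test-reporter@v2
  with:
    name: pytest results           # illustrative check/summary name
    path: test-report.xml          # file produced by `pytest --junit-xml=test-report.xml`
    reporter: python-xunit         # reporter added in 2.3.0
    report-title: 'Python tests'   # optional title rendered above the report
    collapsed: auto                # 'auto' | 'always' | 'never'
```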
**dist/index.js** (generated, vendored, 945 changed lines): diff suppressed because it is too large.

**dist/licenses.txt** (generated, vendored, 80 changed lines)
```diff
@@ -1350,48 +1350,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


 sax
-ISC
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-====
-
-`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
-License, as follows:
-
-Copyright Mathias Bynens <https://mathiasbynens.be/>
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+BlueOak-1.0.0
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***


 to-regex-range
```
**package-lock.json** (generated, 3063 changed lines): diff suppressed because it is too large.
**package.json** (26 changed lines)

```diff
@@ -1,6 +1,6 @@
 {
   "name": "test-reporter",
-  "version": "2.1.1",
+  "version": "2.3.0",
   "private": true,
   "description": "Presents test results from popular testing frameworks as Github check run",
   "main": "lib/main.js",
@@ -42,33 +42,35 @@
     "adm-zip": "^0.5.16",
     "fast-glob": "^3.3.3",
     "got": "^11.8.6",
-    "picomatch": "^4.0.2",
+    "picomatch": "^4.0.3",
     "xml2js": "^0.6.2"
   },
   "devDependencies": {
     "@octokit/webhooks-types": "^7.6.1",
     "@types/adm-zip": "^0.5.7",
-    "@types/jest": "^29.5.14",
-    "@types/node": "^20.19.2",
-    "@types/picomatch": "^2.3.4",
+    "@types/jest": "^30.0.0",
+    "@types/node": "^20.19.23",
+    "@types/picomatch": "^4.0.2",
     "@types/xml2js": "^0.4.14",
     "@typescript-eslint/eslint-plugin": "^7.18.0",
     "@typescript-eslint/parser": "^7.18.0",
-    "@vercel/ncc": "^0.38.3",
+    "@vercel/ncc": "^0.38.4",
     "eol-converter-cli": "^1.1.0",
     "eslint": "^8.57.1",
     "eslint-import-resolver-typescript": "^3.10.1",
     "eslint-plugin-github": "^4.10.2",
     "eslint-plugin-import": "^2.32.0",
     "eslint-plugin-jest": "^28.14.0",
-    "eslint-plugin-prettier": "^5.5.1",
-    "jest": "^29.7.0",
-    "jest-circus": "^29.7.0",
+    "eslint-plugin-prettier": "^5.5.4",
+    "jest": "^30.2.0",
     "jest-junit": "^16.0.0",
-    "js-yaml": "^4.1.0",
+    "js-yaml": "^4.1.1",
     "prettier": "^3.6.2",
-    "ts-jest": "^29.4.0",
-    "typescript": "^5.8.3"
+    "ts-jest": "^29.4.5",
+    "typescript": "^5.9.3"
   },
+  "overrides": {
+    "sax": "^1.4.3"
+  },
   "jest-junit": {
     "suiteName": "jest tests",
```
**reports/jest/package-lock.json** (generated, 4826 changed lines): diff suppressed because it is too large.

**reports/mocha/package-lock.json** (generated, 1453 changed lines): diff suppressed because it is too large.
**reports/mocha/package.json**

```diff
@@ -9,6 +9,6 @@
   "author": "Michal Dorner <dorner.michal@gmail.com>",
   "license": "MIT",
   "devDependencies": {
-    "mocha": "^8.3.0"
+    "mocha": "^11.7.5"
   }
 }
```
**src/main.ts** (39 changed lines)

```diff
@@ -17,9 +17,9 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
 import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
 import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
 import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
+import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
 import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
 import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'

 import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
 import {getCheckRunContext} from './utils/github-utils'

@@ -49,6 +49,7 @@ class TestReporter {
   readonly useActionsSummary = core.getInput('use-actions-summary', {required: false}) === 'true'
   readonly badgeTitle = core.getInput('badge-title', {required: false})
   readonly reportTitle = core.getInput('report-title', {required: false})
+  readonly collapsed = core.getInput('collapsed', {required: false}) as 'auto' | 'always' | 'never'
   readonly token = core.getInput('token', {required: true})
   readonly octokit: InstanceType<typeof GitHub>
   readonly context = getCheckRunContext()
@@ -66,6 +67,11 @@ class TestReporter {
       return
     }

+    if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
+      core.setFailed(`Input parameter 'collapsed' has invalid value`)
+      return
+    }
+
     if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {
       core.setFailed(`Input parameter 'max-annotations' has invalid value`)
       return
@@ -166,7 +172,7 @@ class TestReporter {
       }
     }

-    const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle} = this
+    const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed} = this

     const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
     const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
@@ -175,19 +181,23 @@ class TestReporter {

     let baseUrl = ''
     if (this.useActionsSummary) {
-      const summary = getReport(results, {
-        listSuites,
-        listTests,
-        baseUrl,
-        onlySummary,
-        useActionsSummary,
-        badgeTitle,
-        reportTitle
-      })
+      const summary = getReport(
+        results,
+        {
+          listSuites,
+          listTests,
+          baseUrl,
+          onlySummary,
+          useActionsSummary,
+          badgeTitle,
+          reportTitle,
+          collapsed
+        },
+        shortSummary
+      )

       core.info('Summary content:')
       core.info(summary)
-      core.summary.addRaw(`# ${shortSummary}`)
       await core.summary.addRaw(summary).write()
     } else {
       core.info(`Creating check run ${name}`)
@@ -211,7 +221,8 @@ class TestReporter {
         onlySummary,
         useActionsSummary,
         badgeTitle,
-        reportTitle
+        reportTitle,
+        collapsed
       })

       core.info('Creating annotations')
@@ -260,6 +271,8 @@ class TestReporter {
         return new JestJunitParser(options)
       case 'mocha-json':
         return new MochaJsonParser(options)
+      case 'python-xunit':
+        return new PythonXunitParser(options)
       case 'rspec-json':
         return new RspecJsonParser(options)
       case 'swift-xunit':
```
**src/parsers/dotnet-nunit/dotnet-nunit-parser.ts**

```diff
@@ -77,13 +77,13 @@ export class DotnetNunitParser implements TestParser {
       .join('.')
     const groupName = suitesWithoutTheories[suitesWithoutTheories.length - 1].$.name

-    let existingSuite = result.find(existingSuite => existingSuite.name === suiteName)
+    let existingSuite = result.find(suite => suite.name === suiteName)
     if (existingSuite === undefined) {
       existingSuite = new TestSuiteResult(suiteName, [])
       result.push(existingSuite)
     }

-    let existingGroup = existingSuite.groups.find(existingGroup => existingGroup.name === groupName)
+    let existingGroup = existingSuite.groups.find(group => group.name === groupName)
     if (existingGroup === undefined) {
       existingGroup = new TestGroupResult(groupName, [])
       existingSuite.groups.push(existingGroup)
```
**src/parsers/python-xunit/python-xunit-parser.ts** (new file, 8 lines)

```ts
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'

// python-xunit input is JUnit XML, so the parser reuses the java-junit implementation
export class PythonXunitParser extends JavaJunitParser {
  constructor(readonly options: ParseOptions) {
    super(options)
  }
}
```
**src/report/get-report.ts** (the badge return strings below, lost in extraction, are restored from the surrounding variable definitions)

```diff
@@ -16,6 +16,7 @@ export interface ReportOptions {
   useActionsSummary: boolean
   badgeTitle: string
   reportTitle: string
+  collapsed: 'auto' | 'always' | 'never'
 }

 export const DEFAULT_OPTIONS: ReportOptions = {
@@ -25,16 +26,19 @@ export const DEFAULT_OPTIONS: ReportOptions = {
   onlySummary: false,
   useActionsSummary: true,
   badgeTitle: 'tests',
-  reportTitle: ''
+  reportTitle: '',
+  collapsed: 'auto'
 }

-export function getReport(results: TestRunResult[], options: ReportOptions = DEFAULT_OPTIONS): string {
-  core.info('Generating check run summary')
-
+export function getReport(
+  results: TestRunResult[],
+  options: ReportOptions = DEFAULT_OPTIONS,
+  shortSummary = ''
+): string {
   applySort(results)

   const opts = {...options}
-  let lines = renderReport(results, opts)
+  let lines = renderReport(results, opts, shortSummary)
   let report = lines.join('\n')

   if (getByteLength(report) <= getMaxReportLength(options)) {
@@ -44,7 +48,7 @@ export function getReport(results: TestRunResult[], options: ReportOptions = DEF
   if (opts.listTests === 'all') {
     core.info("Test report summary is too big - setting 'listTests' to 'failed'")
     opts.listTests = 'failed'
-    lines = renderReport(results, opts)
+    lines = renderReport(results, opts, shortSummary)
     report = lines.join('\n')
     if (getByteLength(report) <= getMaxReportLength(options)) {
       return report
@@ -101,7 +105,7 @@ function getByteLength(text: string): number {
   return Buffer.byteLength(text, 'utf8')
 }

-function renderReport(results: TestRunResult[], options: ReportOptions): string[] {
+function renderReport(results: TestRunResult[], options: ReportOptions, shortSummary: string): string[] {
   const sections: string[] = []

   const reportTitle: string = options.reportTitle.trim()
@@ -109,6 +113,10 @@ function renderReport(results: TestRunResult[], options: ReportOptions): string[
     sections.push(`# ${reportTitle}`)
   }

+  if (shortSummary) {
+    sections.push(`## ${shortSummary}`)
+  }
+
   const badge = getReportBadge(results, options)
   sections.push(badge)

@@ -125,7 +133,7 @@ function getReportBadge(results: TestRunResult[], options: ReportOptions): strin
   return getBadge(passed, failed, skipped, options)
 }

-function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
+export function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
   const text = []
   if (passed > 0) {
     text.push(`${passed} passed`)
@@ -145,14 +153,20 @@ function getBadge(passed: number, failed: number, skipped: number, options: Repo
     color = 'yellow'
   }
   const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully'
-  const uri = encodeURIComponent(`${options.badgeTitle}-${message}-${color}`)
-  return `![${hint}](https://img.shields.io/badge/${uri})`
+  const encodedBadgeTitle = encodeImgShieldsURIComponent(options.badgeTitle)
+  const encodedMessage = encodeImgShieldsURIComponent(message)
+  const encodedColor = encodeImgShieldsURIComponent(color)
+  return `![${hint}](https://img.shields.io/badge/${encodedBadgeTitle}-${encodedMessage}-${encodedColor})`
 }

 function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): string[] {
   const sections: string[] = []
   const totalFailed = testRuns.reduce((sum, tr) => sum + tr.failed, 0)
-  if (totalFailed === 0) {
+
+  // Determine if report should be collapsed based on collapsed option
+  const shouldCollapse = options.collapsed === 'always' || (options.collapsed === 'auto' && totalFailed === 0)
+
+  if (shouldCollapse) {
     sections.push(`<details><summary>Expand for details</summary>`)
     sections.push(` `)
   }
@@ -185,7 +199,7 @@ function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): s
     sections.push(...suitesReports)
   }

-  if (totalFailed === 0) {
+  if (shouldCollapse) {
     sections.push(`</details>`)
   }
   return sections
@@ -305,3 +319,7 @@ function getResultIcon(result: TestExecutionResult): string {
     return ''
   }
 }
+
+function encodeImgShieldsURIComponent(component: string): string {
+  return encodeURIComponent(component).replace(/-/g, '--').replace(/_/g, '__')
+}
```
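To make the escaping rule concrete, here is a small standalone sketch; the local `enc` helper mirrors `encodeImgShieldsURIComponent` above:

```ts
// shields.io static badges use single '-' to separate label, message and color,
// so literal hyphens/underscores must be doubled after percent-encoding.
const enc = (s: string): string => encodeURIComponent(s).replace(/-/g, '--').replace(/_/g, '__')

console.log(enc('unit-tests'))          // unit--tests
console.log(enc('my_integration_test')) // my__integration__test
console.log(enc('5 passed, 1 skipped')) // 5%20passed%2C%201%20skipped

// Assembled badge URL, with the structural hyphens kept single:
// https://img.shields.io/badge/unit--tests-3%20passed-success
```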