Merge pull request #643 from micmarc/feature/python-support

Jozef Izso 2025-11-15 11:12:45 +01:00 committed by GitHub
commit 7c636a991c
9 changed files with 255 additions and 1 deletion

@@ -19,6 +19,7 @@ This [Github Action](https://github.com/features/actions) displays test results
- Go / [go test](https://pkg.go.dev/testing)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- Swift / xUnit
For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +146,9 @@ jobs:
# java-junit
# jest-junit
# mocha-json
# python-xunit
# rspec-json
# swift-xunit
reporter: ''
# Allows you to generate only the summary.
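For orientation, here is a minimal sketch of a workflow step that uses the new `python-xunit` reporter value. The action reference, check-run name, and report path below are illustrative assumptions and are not taken from this pull request:

```yaml
# Sketch only: the `uses:` reference and the paths are assumptions for illustration.
- name: Publish Python test results
  uses: dorny/test-reporter@v2      # assumed published action reference; pin as appropriate
  if: success() || failure()        # publish results even when tests fail
  with:
    name: Python tests              # name of the check run created by the action
    path: test-results/*.xml        # JUnit/xUnit XML files produced by the test run
    reporter: python-xunit          # the reporter value added in this PR
```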
@@ -349,6 +352,16 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
</details>
<details>
<summary>python-xunit (Experimental)</summary>
Support for Python test results in xUnit format is experimental: it should work, but it has not been extensively tested.
For pytest, run with the [`--junit-xml`](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) option, which writes a JUnit-format XML report to the path you specify.
For unittest, use a test runner that emits JUnit-format XML, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/). See the workflow sketch below.
</details>
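The steps below sketch how the xUnit file consumed by this reporter might be produced in CI; the report path and step layout are assumptions for illustration, not part of this change:

```yaml
# Sketch only: paths and step names are assumptions for illustration.
- name: Run tests with pytest
  # --junit-xml makes pytest write a JUnit-format XML report to the given path
  run: pytest --junit-xml=test-results/pytest.xml

# For plain unittest, a runner such as unittest-xml-reporting can be used instead
# to write an equivalent JUnit-format XML file to the same location.
```

The resulting XML file is then passed to the action through its `path` input together with `reporter: python-xunit`, as in the hypothetical step shown earlier.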
<details>
<summary>swift-xunit (Experimental)</summary>

@@ -0,0 +1,23 @@
![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
AssertionError: failed
❌ test_error
Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```

@@ -0,0 +1,87 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-unittest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "TestAcme",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_0_param1",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_1_param2",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
",
"line": undefined,
"message": "AssertionError: failed",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
],
},
],
"name": "TestAcme-20251114214921",
"totalTime": 1,
},
],
"totalTime": 1,
}
`;

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
  <testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
  <testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
  <testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
  <testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
  <testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
    <failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
    self.fail("failed")
AssertionError: failed
]]></failure>
  </testcase>
  <testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
    <error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
    raise Exception("error")
Exception: error
]]></error>
  </testcase>
  <testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
    <skipped type="skip" message="skipped"/>
  </testcase>
  <testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
    <skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
  </testcase>
</testsuite>

@@ -0,0 +1,70 @@
import * as fs from 'fs'
import * as path from 'path'
import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'

const defaultOpts: ParseOptions = {
  parseErrors: true,
  trackedFiles: []
}

describe('python-xunit unittest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

  it('report from python test results matches snapshot', async () => {
    const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }
    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()
    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })

  it('report does not include a title by default', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result])

    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it.each([
    ['empty string', ''],
    ['space', ' '],
    ['tab', '\t'],
    ['newline', '\n']
  ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle
    })

    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it('report includes a custom report title', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle: 'My Custom Title'
    })

    // Report should have the title as the first line
    expect(report).toMatch(/^# My Custom Title\n/)
  })
})

@@ -32,6 +32,7 @@ inputs:
- java-junit
- jest-junit
- mocha-json
- python-xunit
- rspec-json
- swift-xunit
required: true

dist/index.js (generated, vendored)

@@ -277,6 +277,7 @@ const golang_json_parser_1 = __nccwpck_require__(5162);
const java_junit_parser_1 = __nccwpck_require__(8342);
const jest_junit_parser_1 = __nccwpck_require__(1042);
const mocha_json_parser_1 = __nccwpck_require__(5402);
const python_xunit_parser_1 = __nccwpck_require__(6578);
const rspec_json_parser_1 = __nccwpck_require__(9768);
const swift_xunit_parser_1 = __nccwpck_require__(7330);
const path_utils_1 = __nccwpck_require__(9132);
@@ -493,6 +494,8 @@ class TestReporter {
return new jest_junit_parser_1.JestJunitParser(options);
case 'mocha-json':
return new mocha_json_parser_1.MochaJsonParser(options);
case 'python-xunit':
return new python_xunit_parser_1.PythonXunitParser(options);
case 'rspec-json':
return new rspec_json_parser_1.RspecJsonParser(options);
case 'swift-xunit':
@@ -1663,6 +1666,26 @@ class MochaJsonParser {
exports.MochaJsonParser = MochaJsonParser;

/***/ }),

/***/ 6578:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.PythonXunitParser = void 0;
const java_junit_parser_1 = __nccwpck_require__(8342);
class PythonXunitParser extends java_junit_parser_1.JavaJunitParser {
    options;
    constructor(options) {
        super(options);
        this.options = options;
    }
}
exports.PythonXunitParser = PythonXunitParser;

/***/ }),

/***/ 9768:

@@ -17,9 +17,9 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
import {getCheckRunContext} from './utils/github-utils'
@@ -271,6 +271,8 @@ class TestReporter {
return new JestJunitParser(options)
case 'mocha-json':
return new MochaJsonParser(options)
case 'python-xunit':
return new PythonXunitParser(options)
case 'rspec-json':
return new RspecJsonParser(options)
case 'swift-xunit':

@@ -0,0 +1,8 @@
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'

export class PythonXunitParser extends JavaJunitParser {
  constructor(readonly options: ParseOptions) {
    super(options)
  }
}