Mirror of https://github.com/dorny/test-reporter.git (synced 2025-12-13 04:58:45 +01:00)

Enhance pytest support

Add robust test schema for pytest report. Update README with sample pytest command.

parent 3b9dad208e
commit c1a56edcfe

6 changed files with 203 additions and 3 deletions
@@ -357,9 +357,13 @@ Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/release

Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.

For pytest support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.

For unittest support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
```shell
pytest --junit-xml=test-report.xml
```

For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
</details>

<details>
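As an aside on the unittest route mentioned above: below is a minimal sketch of a runner script that uses unittest-xml-reporting to emit JUnit-style XML the action can pick up. The file name `run_tests.py` and the `test-reports` output directory are illustrative assumptions, not part of this commit.

```python
# run_tests.py -- minimal sketch; assumes `pip install unittest-xml-reporting`
import unittest

import xmlrunner  # module provided by the unittest-xml-reporting package

if __name__ == "__main__":
    # Discover tests under ./tests and write JUnit-style XML files into ./test-reports
    suite = unittest.defaultTestLoader.discover("tests")
    xmlrunner.XMLTestRunner(output="test-reports").run(suite)
```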
26 __tests__/__outputs__/python-xunit-pytest.md Normal file

@@ -0,0 +1,26 @@
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```
@@ -1,5 +1,110 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
  "path": "fixtures/python-xunit-pytest.xml",
  "suites": [
    TestSuiteResult {
      "groups": [
        TestGroupResult {
          "name": "tests.test_lib",
          "tests": [
            TestCaseResult {
              "error": undefined,
              "name": "test_always_pass",
              "result": "success",
              "time": 2,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_with_subtests",
              "result": "success",
              "time": 5,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized[param1]",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_parameterized[param2]",
              "result": "success",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_always_skip",
              "result": "skipped",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "def test_always_fail():
> assert False
E assert False

tests/test_lib.py:25: AssertionError
",
                "line": undefined,
                "message": "assert False",
                "path": undefined,
              },
              "name": "test_always_fail",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_expected_failure",
              "result": "skipped",
              "time": 0,
            },
            TestCaseResult {
              "error": {
                "details": "def test_error():
> raise Exception("error")
E Exception: error

tests/test_lib.py:32: Exception
",
                "line": undefined,
                "message": "Exception: error",
                "path": undefined,
              },
              "name": "test_error",
              "result": "failed",
              "time": 0,
            },
            TestCaseResult {
              "error": undefined,
              "name": "test_with_record_property",
              "result": "success",
              "time": 0,
            },
          ],
        },
        TestGroupResult {
          "name": "custom_classname",
          "tests": [
            TestCaseResult {
              "error": undefined,
              "name": "test_with_record_xml_attribute",
              "result": "success",
              "time": 0,
            },
          ],
        },
      ],
      "name": "pytest",
      "totalTime": 19,
    },
  ],
  "totalTime": undefined,
}
`;

exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
  "path": "fixtures/python-xunit-unittest.xml",
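A note for readers of the snapshot above: the parsed results appear to store durations as integer milliseconds, while the pytest JUnit XML fixture below records them as seconds. This is an observation from the values in this diff, not a documented contract; a quick check of that reading:

```python
# Observed mapping between the fixture XML and the snapshot values (assumption):
#   <testcase time="0.002"> -> "time": 2, <testsuite time="0.019"> -> "totalTime": 19
for seconds, millis in [(0.002, 2), (0.005, 5), (0.019, 19)]:
    assert round(seconds * 1000) == millis
```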
42 __tests__/fixtures/python-xunit-pytest.xml Normal file

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
> assert False
E assert False

tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
> raise Exception("error")
E Exception: error

tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>
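For orientation, the fixture above could have come from a test module along these lines. This is a hypothetical reconstruction (exact line numbers, timings and skip messages will differ); it assumes the pytest-subtests plugin for `test_with_subtests` and uses pytest's built-in `record_property` and `record_xml_attribute` fixtures for the last two cases.

```python
# tests/test_lib.py -- hypothetical reconstruction of the module behind the fixture
import pytest


def test_always_pass():
    assert True


def test_with_subtests(subtests):  # the `subtests` fixture comes from the pytest-subtests plugin
    for i in range(2):
        with subtests.test(i=i):
            assert i >= 0


@pytest.mark.parametrize("value", ["param1", "param2"])
def test_parameterized(value):
    assert isinstance(value, str)


@pytest.mark.skip(reason="skipped")
def test_always_skip():
    assert True


def test_always_fail():
    assert False


@pytest.mark.xfail
def test_expected_failure():  # xfail shows up as <skipped type="pytest.xfail"> in the XML
    assert False


def test_error():
    raise Exception("error")


def test_with_record_property(record_property):
    record_property("example_key", 1)  # emitted as a <property> under this testcase


def test_with_record_xml_attribute(record_xml_attribute):
    record_xml_attribute("classname", "custom_classname")  # overrides the JUnit classname
```

Running such a module with the `--junit-xml` option, as shown in the README change above, produces XML in this shape.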
@@ -15,9 +15,9 @@ describe('python-xunit unittest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')

  it('report from python test results matches snapshot', async () => {
    const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
@@ -68,3 +68,26 @@ describe('python-xunit unittest report', () => {
    expect(report).toMatch(/^# My Custom Title\n/)
  })
})

describe('python-xunit pytest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')

  it('report from python test results matches snapshot', async () => {
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }

    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
})