Improve testing with a robust schema for the unittest report

Michael Marcus 2025-11-14 21:59:25 -05:00
parent 9b8d3b002e
commit fe87682515
5 changed files with 113 additions and 71 deletions


@@ -1,17 +1,23 @@
-![Tests failed](https://img.shields.io/badge/tests-2%20passed%2C%201%20failed-critical)
+![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/python-xunit.xml](#user-content-r0)|2 ✅|1 ❌||220ms|
-## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit.xml</a>
-**3** tests were completed in **220ms** with **2** passed, **1** failed and **0** skipped.
+|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
+## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
+**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
 |Test suite|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[pytest](#user-content-r0s0)|2 ✅|1 ❌||220ms|
-### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
+|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
+### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
 ```
-src.acme.test_lib
+TestAcme
   ✅ test_always_pass
-  ✅ test_always_skip
+  ✅ test_parameterized_0_param1
+  ✅ test_parameterized_1_param2
+  ✅ test_with_subtests
   ❌ test_always_fail
-	failed
+	AssertionError: failed
+  ❌ test_error
+	Exception: error
+  ⚪ test_always_skip
+  ⚪ test_expected_failure
 ```
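Decoded, the badge change on the first line is simple: the shields.io path is `tests-<message>-<color>`, where the message is the percent-encoded counts (`%20` for spaces, `%2C` for commas) and the color is `critical` because failures are present. A minimal sketch of that encoding — the helper name and color rule here are assumptions for illustration, not necessarily how this repo builds the badge:

```ts
// Hypothetical helper: assemble a shields.io badge URL from test counts.
function testsBadgeUrl(passed: number, failed: number, skipped: number): string {
  const parts: string[] = []
  if (passed > 0) parts.push(`${passed} passed`)
  if (failed > 0) parts.push(`${failed} failed`)
  if (skipped > 0) parts.push(`${skipped} skipped`)
  // encodeURIComponent turns ' ' into %20 and ',' into %2C, as seen above
  const message = encodeURIComponent(parts.join(', '))
  const color = failed > 0 ? 'critical' : 'success' // red badge on any failure
  return `https://img.shields.io/badge/tests-${message}-${color}`
}

// testsBadgeUrl(4, 2, 2) ===
// 'https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical'
```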


@@ -1,44 +1,87 @@
 // Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
 
-exports[`python-xunit tests report from python test results matches snapshot 1`] = `
+exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
 TestRunResult {
-  "path": "fixtures/python-xunit.xml",
+  "path": "fixtures/python-xunit-unittest.xml",
   "suites": [
     TestSuiteResult {
       "groups": [
         TestGroupResult {
-          "name": "src.acme.test_lib",
+          "name": "TestAcme",
           "tests": [
             TestCaseResult {
               "error": undefined,
               "name": "test_always_pass",
               "result": "success",
-              "time": 36.386333,
+              "time": 0,
             },
             TestCaseResult {
               "error": undefined,
-              "name": "test_always_skip",
+              "name": "test_parameterized_0_param1",
               "result": "success",
-              "time": 92.039167,
+              "time": 1,
             },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_parameterized_1_param2",
+              "result": "success",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_with_subtests",
+              "result": "success",
+              "time": 0,
+            },
             TestCaseResult {
               "error": {
-                "details": undefined,
+                "details": "Traceback (most recent call last):
+  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
+    self.fail("failed")
+AssertionError: failed
+",
                 "line": undefined,
-                "message": "failed",
+                "message": "AssertionError: failed",
                 "path": undefined,
               },
               "name": "test_always_fail",
               "result": "failed",
-              "time": 92.05175,
+              "time": 0,
             },
+            TestCaseResult {
+              "error": {
+                "details": "Traceback (most recent call last):
+  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
+    raise Exception("error")
+Exception: error
+",
+                "line": undefined,
+                "message": "Exception: error",
+                "path": undefined,
+              },
+              "name": "test_error",
+              "result": "failed",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_always_skip",
+              "result": "skipped",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_expected_failure",
+              "result": "skipped",
+              "time": 0,
+            },
           ],
         },
       ],
-      "name": "pytest",
-      "totalTime": 220.47725000000003,
+      "name": "TestAcme-20251114214921",
+      "totalTime": 1,
     },
   ],
-  "totalTime": undefined,
+  "totalTime": 1,
 }
 `;
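A unit detail this snapshot makes visible: the XML `time` attributes are in seconds, while the parsed times are in milliseconds — `time="0.001"` becomes `"time": 1`, and the old suite's `0.22047725` seconds surfaced as `220.47725000000003`, i.e. a bare multiply by 1000 with floating-point noise and no rounding. A sketch of that conversion, with an assumed helper name:

```ts
// Hypothetical helper documenting the seconds → milliseconds conversion above.
function parseTimeMs(timeAttr: string | undefined): number | undefined {
  if (timeAttr === undefined) return undefined
  const seconds = parseFloat(timeAttr)
  return Number.isNaN(seconds) ? undefined : seconds * 1000
}

parseTimeMs('0.001')      // 1
parseTimeMs('0.22047725') // 220.47725000000003 — the float noise in the old snapshot
```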


@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
+  <testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
+  <testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
+  <testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
+  <testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
+  <testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
+    <failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
+  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
+    self.fail("failed")
+AssertionError: failed
+]]></failure>
+  </testcase>
+  <testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
+    <error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
+  File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
+    raise Exception("error")
+Exception: error
+]]></error>
+  </testcase>
+  <testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
+    <skipped type="skip" message="skipped"/>
+  </testcase>
+  <testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
+    <skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
+  </testcase>
+</testsuite>
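This fixture covers every classification the parser has to make, and the snapshot above shows the mapping: `<failure>` and `<error>` both come out as `result: "failed"`, with `message` composed as `type: message` and the CDATA traceback preserved as `details`, while `<skipped>` maps to `result: "skipped"` with no error attached, whether it is a plain skip or an expected failure (`type="XFAIL"`). A sketch of that mapping with assumed shapes and names, not the parser's actual code:

```ts
// Assumed minimal shapes for illustration only.
interface Problem {
  type: string    // e.g. 'AssertionError' or 'Exception'
  message: string // e.g. 'failed' or 'error'
  cdata: string   // the traceback captured in the CDATA section
}

interface ParsedCase {
  name: string
  result: 'success' | 'failed' | 'skipped'
  error?: {message: string; details: string}
}

function mapTestCase(tc: {name: string; failure?: Problem; error?: Problem; skipped?: boolean}): ParsedCase {
  const problem = tc.failure ?? tc.error // <failure> and <error> both report as 'failed'
  if (problem) {
    return {
      name: tc.name,
      result: 'failed',
      error: {
        message: `${problem.type}: ${problem.message}`, // → 'AssertionError: failed'
        details: problem.cdata
      }
    }
  }
  // <skipped> covers plain skips and XFAIL expected failures alike
  return {name: tc.name, result: tc.skipped ? 'skipped' : 'success'}
}
```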


@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<testsuites>
-  <testsuite name="pytest" errors="0" tests="3" failures="1" time="0.22047725">
-    <testcase classname="src.acme.test_lib" name="test_always_pass" time="0.036386333" file="src/acme/test_lib.py">
-    </testcase>
-    <testcase classname="src.acme.test_lib" name="test_always_skip" time="0.092039167" file="src/acme/test_lib.py">
-    </testcase>
-    <testcase classname="src.acme.test_lib" name="test_always_fail" time="0.09205175" file="src/acme/test_lib.py">
-      <failure message="failed"></failure>
-    </testcase>
-  </testsuite>
-</testsuites>


@@ -6,16 +6,21 @@ import {ParseOptions} from '../src/test-parser'
 import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
 import {normalizeFilePath} from '../src/utils/path-utils'
 
-describe('python-xunit tests', () => {
-  it('report from python test results matches snapshot', async () => {
-    const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit.xml')
-    const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
-    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
-    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+const defaultOpts: ParseOptions = {
+  parseErrors: true,
+  trackedFiles: []
+}
 
-    const trackedFiles = ['src/acme/test_lib.py']
+describe('python-xunit unittest report', () => {
+  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
+  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+
+  it('report from python test results matches snapshot', async () => {
+    const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
+    const trackedFiles = ['tests/test_lib.py']
     const opts: ParseOptions = {
-      parseErrors: true,
+      ...defaultOpts,
       trackedFiles
     }
@@ -29,16 +34,7 @@ describe('python-xunit tests', () => {
   })
 
   it('report does not include a title by default', async () => {
-    const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit.xml')
-    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
-    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
-    const opts: ParseOptions = {
-      parseErrors: true,
-      trackedFiles: []
-    }
-    const parser = new PythonXunitParser(opts)
+    const parser = new PythonXunitParser(defaultOpts)
     const result = await parser.parse(filePath, fileContent)
     const report = getReport([result])
     // Report should have the badge as the first line
@@ -51,16 +47,7 @@ describe('python-xunit tests', () => {
     ['tab', '\t'],
     ['newline', '\n']
   ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
-    const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit.xml')
-    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
-    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
-    const opts: ParseOptions = {
-      parseErrors: true,
-      trackedFiles: []
-    }
-    const parser = new PythonXunitParser(opts)
+    const parser = new PythonXunitParser(defaultOpts)
     const result = await parser.parse(filePath, fileContent)
     const report = getReport([result], {
       ...DEFAULT_OPTIONS,
@@ -71,16 +58,7 @@ describe('python-xunit tests', () => {
   })
 
   it('report includes a custom report title', async () => {
-    const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit.xml')
-    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
-    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
-    const opts: ParseOptions = {
-      parseErrors: true,
-      trackedFiles: []
-    }
-    const parser = new PythonXunitParser(opts)
+    const parser = new PythonXunitParser(defaultOpts)
     const result = await parser.parse(filePath, fileContent)
     const report = getReport([result], {
       ...DEFAULT_OPTIONS,