diff --git a/README.md b/README.md index c307ff3..a7c7153 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,7 @@ This [Github Action](https://github.com/features/actions) displays test results - Go / [go test](https://pkg.go.dev/testing) - Java / [JUnit](https://junit.org/) - JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/) +- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html) - Swift / xUnit For more information see [Supported formats](#supported-formats) section. @@ -145,7 +146,9 @@ jobs: # java-junit # jest-junit # mocha-json + # python-xunit # rspec-json + # swift-xunit reporter: '' # Allows you to generate only the summary. @@ -349,6 +352,20 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue. +
+<details>
+  <summary>python-xunit (Experimental)</summary>
+
+Support for Python test results in xUnit format is experimental: it should work, but it has not been extensively tested.
+
+For **pytest**, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) by running with the `--junit-xml` option; its value also sets the output path for the report:
+
+```shell
+pytest --junit-xml=test-report.xml
+```
+
+For **unittest**, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
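+
+As a minimal sketch (assuming `unittest-xml-reporting` is installed; the test and directory names below are only illustrative), a suite can be wired up like this:
+
+```python
+import unittest
+
+import xmlrunner  # provided by the unittest-xml-reporting package
+
+
+class TestExample(unittest.TestCase):
+    def test_always_pass(self):
+        self.assertTrue(True)
+
+
+if __name__ == "__main__":
+    # Write a JUnit-style XML report (TEST-*.xml) into the test-reports/ directory
+    unittest.main(testRunner=xmlrunner.XMLTestRunner(output="test-reports"))
+```
+
+The generated XML files can then be processed by this action with `reporter: python-xunit`.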
+</details>
+
swift-xunit (Experimental) diff --git a/__tests__/__outputs__/python-xunit-pytest.md b/__tests__/__outputs__/python-xunit-pytest.md new file mode 100644 index 0000000..7b13e28 --- /dev/null +++ b/__tests__/__outputs__/python-xunit-pytest.md @@ -0,0 +1,26 @@ +![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical) +|Report|Passed|Failed|Skipped|Time| +|:---|---:|---:|---:|---:| +|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms| +## ❌ fixtures/python-xunit-pytest.xml +**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped. +|Test suite|Passed|Failed|Skipped|Time| +|:---|---:|---:|---:|---:| +|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms| +### ❌ pytest +``` +tests.test_lib + ✅ test_always_pass + ✅ test_with_subtests + ✅ test_parameterized[param1] + ✅ test_parameterized[param2] + ⚪ test_always_skip + ❌ test_always_fail + assert False + ⚪ test_expected_failure + ❌ test_error + Exception: error + ✅ test_with_record_property +custom_classname + ✅ test_with_record_xml_attribute +``` \ No newline at end of file diff --git a/__tests__/__outputs__/python-xunit-unittest.md b/__tests__/__outputs__/python-xunit-unittest.md new file mode 100644 index 0000000..230d186 --- /dev/null +++ b/__tests__/__outputs__/python-xunit-unittest.md @@ -0,0 +1,23 @@ +![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical) +|Report|Passed|Failed|Skipped|Time| +|:---|---:|---:|---:|---:| +|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms| +## ❌ fixtures/python-xunit-unittest.xml +**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped. +|Test suite|Passed|Failed|Skipped|Time| +|:---|---:|---:|---:|---:| +|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms| +### ❌ TestAcme-20251114214921 +``` +TestAcme + ✅ test_always_pass + ✅ test_parameterized_0_param1 + ✅ test_parameterized_1_param2 + ✅ test_with_subtests + ❌ test_always_fail + AssertionError: failed + ❌ test_error + Exception: error + ⚪ test_always_skip + ⚪ test_expected_failure +``` \ No newline at end of file diff --git a/__tests__/__snapshots__/python-xunit.test.ts.snap b/__tests__/__snapshots__/python-xunit.test.ts.snap new file mode 100644 index 0000000..f325c84 --- /dev/null +++ b/__tests__/__snapshots__/python-xunit.test.ts.snap @@ -0,0 +1,192 @@ +// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing + +exports[`python-xunit pytest report report from python test results matches snapshot 1`] = ` +TestRunResult { + "path": "fixtures/python-xunit-pytest.xml", + "suites": [ + TestSuiteResult { + "groups": [ + TestGroupResult { + "name": "tests.test_lib", + "tests": [ + TestCaseResult { + "error": undefined, + "name": "test_always_pass", + "result": "success", + "time": 2, + }, + TestCaseResult { + "error": undefined, + "name": "test_with_subtests", + "result": "success", + "time": 5, + }, + TestCaseResult { + "error": undefined, + "name": "test_parameterized[param1]", + "result": "success", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_parameterized[param2]", + "result": "success", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_always_skip", + "result": "skipped", + "time": 0, + }, + TestCaseResult { + "error": { + "details": "def test_always_fail(): + > assert False + E assert False + + tests/test_lib.py:25: AssertionError + ", + "line": undefined, + "message": "assert False", + "path": 
undefined, + }, + "name": "test_always_fail", + "result": "failed", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_expected_failure", + "result": "skipped", + "time": 0, + }, + TestCaseResult { + "error": { + "details": "def test_error(): + > raise Exception("error") + E Exception: error + + tests/test_lib.py:32: Exception + ", + "line": undefined, + "message": "Exception: error", + "path": undefined, + }, + "name": "test_error", + "result": "failed", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_with_record_property", + "result": "success", + "time": 0, + }, + ], + }, + TestGroupResult { + "name": "custom_classname", + "tests": [ + TestCaseResult { + "error": undefined, + "name": "test_with_record_xml_attribute", + "result": "success", + "time": 0, + }, + ], + }, + ], + "name": "pytest", + "totalTime": 19, + }, + ], + "totalTime": undefined, +} +`; + +exports[`python-xunit unittest report report from python test results matches snapshot 1`] = ` +TestRunResult { + "path": "fixtures/python-xunit-unittest.xml", + "suites": [ + TestSuiteResult { + "groups": [ + TestGroupResult { + "name": "TestAcme", + "tests": [ + TestCaseResult { + "error": undefined, + "name": "test_always_pass", + "result": "success", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_parameterized_0_param1", + "result": "success", + "time": 1, + }, + TestCaseResult { + "error": undefined, + "name": "test_parameterized_1_param2", + "result": "success", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_with_subtests", + "result": "success", + "time": 0, + }, + TestCaseResult { + "error": { + "details": "Traceback (most recent call last): + File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail + self.fail("failed") +AssertionError: failed +", + "line": undefined, + "message": "AssertionError: failed", + "path": undefined, + }, + "name": "test_always_fail", + "result": "failed", + "time": 0, + }, + TestCaseResult { + "error": { + "details": "Traceback (most recent call last): + File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error + raise Exception("error") +Exception: error +", + "line": undefined, + "message": "Exception: error", + "path": undefined, + }, + "name": "test_error", + "result": "failed", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_always_skip", + "result": "skipped", + "time": 0, + }, + TestCaseResult { + "error": undefined, + "name": "test_expected_failure", + "result": "skipped", + "time": 0, + }, + ], + }, + ], + "name": "TestAcme-20251114214921", + "totalTime": 1, + }, + ], + "totalTime": 1, +} +`; diff --git a/__tests__/fixtures/python-xunit-pytest.xml b/__tests__/fixtures/python-xunit-pytest.xml new file mode 100644 index 0000000..fcb044a --- /dev/null +++ b/__tests__/fixtures/python-xunit-pytest.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + + /Users/mike/Projects/python-test/tests/test_lib.py:20: skipped + + + + def test_always_fail(): + > assert False + E assert False + + tests/test_lib.py:25: AssertionError + + + + + + + def test_error(): + > raise Exception("error") + E Exception: error + + tests/test_lib.py:32: Exception + + + + + + + + + + diff --git a/__tests__/fixtures/python-xunit-unittest.xml b/__tests__/fixtures/python-xunit-unittest.xml new file mode 100644 index 0000000..ecc67d4 --- /dev/null +++ b/__tests__/fixtures/python-xunit-unittest.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + diff 
--git a/__tests__/python-xunit.test.ts b/__tests__/python-xunit.test.ts new file mode 100644 index 0000000..c1550a4 --- /dev/null +++ b/__tests__/python-xunit.test.ts @@ -0,0 +1,93 @@ +import * as fs from 'fs' +import * as path from 'path' + +import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser' +import {ParseOptions} from '../src/test-parser' +import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report' +import {normalizeFilePath} from '../src/utils/path-utils' + +const defaultOpts: ParseOptions = { + parseErrors: true, + trackedFiles: [] +} + +describe('python-xunit unittest report', () => { + const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml') + const filePath = normalizeFilePath(path.relative(__dirname, fixturePath)) + const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'}) + const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md') + + it('report from python test results matches snapshot', async () => { + const trackedFiles = ['tests/test_lib.py'] + const opts: ParseOptions = { + ...defaultOpts, + trackedFiles + } + + const parser = new PythonXunitParser(opts) + const result = await parser.parse(filePath, fileContent) + expect(result).toMatchSnapshot() + + const report = getReport([result]) + fs.mkdirSync(path.dirname(outputPath), {recursive: true}) + fs.writeFileSync(outputPath, report) + }) + + it('report does not include a title by default', async () => { + const parser = new PythonXunitParser(defaultOpts) + const result = await parser.parse(filePath, fileContent) + const report = getReport([result]) + // Report should have the badge as the first line + expect(report).toMatch(/^!\[Tests failed]/) + }) + + it.each([ + ['empty string', ''], + ['space', ' '], + ['tab', '\t'], + ['newline', '\n'] + ])('report does not include a title when configured value is %s', async (_, reportTitle) => { + const parser = new PythonXunitParser(defaultOpts) + const result = await parser.parse(filePath, fileContent) + const report = getReport([result], { + ...DEFAULT_OPTIONS, + reportTitle + }) + // Report should have the badge as the first line + expect(report).toMatch(/^!\[Tests failed]/) + }) + + it('report includes a custom report title', async () => { + const parser = new PythonXunitParser(defaultOpts) + const result = await parser.parse(filePath, fileContent) + const report = getReport([result], { + ...DEFAULT_OPTIONS, + reportTitle: 'My Custom Title' + }) + // Report should have the title as the first line + expect(report).toMatch(/^# My Custom Title\n/) + }) +}) + +describe('python-xunit pytest report', () => { + const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml') + const filePath = normalizeFilePath(path.relative(__dirname, fixturePath)) + const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'}) + const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md') + + it('report from python test results matches snapshot', async () => { + const trackedFiles = ['tests/test_lib.py'] + const opts: ParseOptions = { + ...defaultOpts, + trackedFiles + } + + const parser = new PythonXunitParser(opts) + const result = await parser.parse(filePath, fileContent) + expect(result).toMatchSnapshot() + + const report = getReport([result]) + fs.mkdirSync(path.dirname(outputPath), {recursive: true}) + fs.writeFileSync(outputPath, report) + }) +}) diff --git a/action.yml b/action.yml index c8dd56b..fab75d5 100644 --- a/action.yml +++ b/action.yml @@ -32,6 
+32,7 @@ inputs: - java-junit - jest-junit - mocha-json + - python-xunit - rspec-json - swift-xunit required: true diff --git a/dist/index.js b/dist/index.js index ee83e34..42cb52e 100644 --- a/dist/index.js +++ b/dist/index.js @@ -277,6 +277,7 @@ const golang_json_parser_1 = __nccwpck_require__(5162); const java_junit_parser_1 = __nccwpck_require__(8342); const jest_junit_parser_1 = __nccwpck_require__(1042); const mocha_json_parser_1 = __nccwpck_require__(5402); +const python_xunit_parser_1 = __nccwpck_require__(6578); const rspec_json_parser_1 = __nccwpck_require__(9768); const swift_xunit_parser_1 = __nccwpck_require__(7330); const path_utils_1 = __nccwpck_require__(9132); @@ -493,6 +494,8 @@ class TestReporter { return new jest_junit_parser_1.JestJunitParser(options); case 'mocha-json': return new mocha_json_parser_1.MochaJsonParser(options); + case 'python-xunit': + return new python_xunit_parser_1.PythonXunitParser(options); case 'rspec-json': return new rspec_json_parser_1.RspecJsonParser(options); case 'swift-xunit': @@ -1663,6 +1666,26 @@ class MochaJsonParser { exports.MochaJsonParser = MochaJsonParser; +/***/ }), + +/***/ 6578: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PythonXunitParser = void 0; +const java_junit_parser_1 = __nccwpck_require__(8342); +class PythonXunitParser extends java_junit_parser_1.JavaJunitParser { + options; + constructor(options) { + super(options); + this.options = options; + } +} +exports.PythonXunitParser = PythonXunitParser; + + /***/ }), /***/ 9768: @@ -30299,8 +30322,11 @@ function runParallel (tasks, cb) { /***/ 2560: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -;(function (sax) { // wrapper for non-node envs - sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } +;(function (sax) { + // wrapper for non-node envs + sax.parser = function (strict, opt) { + return new SAXParser(strict, opt) + } sax.SAXParser = SAXParser sax.SAXStream = SAXStream sax.createStream = createStream @@ -30317,9 +30343,18 @@ function runParallel (tasks, cb) { sax.MAX_BUFFER_LENGTH = 64 * 1024 var buffers = [ - 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', - 'procInstName', 'procInstBody', 'entity', 'attribName', - 'attribValue', 'cdata', 'script' + 'comment', + 'sgmlDecl', + 'textNode', + 'tagName', + 'doctype', + 'procInstName', + 'procInstBody', + 'entity', + 'attribName', + 'attribValue', + 'cdata', + 'script', ] sax.EVENTS = [ @@ -30340,10 +30375,10 @@ function runParallel (tasks, cb) { 'ready', 'script', 'opennamespace', - 'closenamespace' + 'closenamespace', ] - function SAXParser (strict, opt) { + function SAXParser(strict, opt) { if (!(this instanceof SAXParser)) { return new SAXParser(strict, opt) } @@ -30362,7 +30397,10 @@ function runParallel (tasks, cb) { parser.noscript = !!(strict || parser.opt.noscript) parser.state = S.BEGIN parser.strictEntities = parser.opt.strictEntities - parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.ENTITIES = + parser.strictEntities ? + Object.create(sax.XML_ENTITIES) + : Object.create(sax.ENTITIES) parser.attribList = [] // namespaces form a prototype chain. 
@@ -30372,6 +30410,12 @@ function runParallel (tasks, cb) { parser.ns = Object.create(rootNS) } + // disallow unquoted attribute values if not otherwise configured + // and strict mode is true + if (parser.opt.unquotedAttributeValues === undefined) { + parser.opt.unquotedAttributeValues = !strict + } + // mostly just for error reporting parser.trackPosition = parser.opt.position !== false if (parser.trackPosition) { @@ -30382,7 +30426,7 @@ function runParallel (tasks, cb) { if (!Object.create) { Object.create = function (o) { - function F () {} + function F() {} F.prototype = o var newf = new F() return newf @@ -30397,7 +30441,7 @@ function runParallel (tasks, cb) { } } - function checkBufferLength (parser) { + function checkBufferLength(parser) { var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) var maxActual = 0 for (var i = 0, l = buffers.length; i < l; i++) { @@ -30433,13 +30477,13 @@ function runParallel (tasks, cb) { parser.bufferCheckPosition = m + parser.position } - function clearBuffers (parser) { + function clearBuffers(parser) { for (var i = 0, l = buffers.length; i < l; i++) { parser[buffers[i]] = '' } } - function flushBuffers (parser) { + function flushBuffers(parser) { closeText(parser) if (parser.cdata !== '') { emitNode(parser, 'oncdata', parser.cdata) @@ -30452,11 +30496,20 @@ function runParallel (tasks, cb) { } SAXParser.prototype = { - end: function () { end(this) }, + end: function () { + end(this) + }, write: write, - resume: function () { this.error = null; return this }, - close: function () { return this.write(null) }, - flush: function () { flushBuffers(this) } + resume: function () { + this.error = null + return this + }, + close: function () { + return this.write(null) + }, + flush: function () { + flushBuffers(this) + }, } var Stream @@ -30465,16 +30518,17 @@ function runParallel (tasks, cb) { } catch (ex) { Stream = function () {} } + if (!Stream) Stream = function () {} var streamWraps = sax.EVENTS.filter(function (ev) { return ev !== 'error' && ev !== 'end' }) - function createStream (strict, opt) { + function createStream(strict, opt) { return new SAXStream(strict, opt) } - function SAXStream (strict, opt) { + function SAXStream(strict, opt) { if (!(this instanceof SAXStream)) { return new SAXStream(strict, opt) } @@ -30515,21 +30569,23 @@ function runParallel (tasks, cb) { me.on(ev, h) }, enumerable: true, - configurable: false + configurable: false, }) }) } SAXStream.prototype = Object.create(Stream.prototype, { constructor: { - value: SAXStream - } + value: SAXStream, + }, }) SAXStream.prototype.write = function (data) { - if (typeof Buffer === 'function' && + if ( + typeof Buffer === 'function' && typeof Buffer.isBuffer === 'function' && - Buffer.isBuffer(data)) { + Buffer.isBuffer(data) + ) { if (!this._decoder) { var SD = (__nccwpck_require__(3193).StringDecoder) this._decoder = new SD('utf8') @@ -30554,7 +30610,10 @@ function runParallel (tasks, cb) { var me = this if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { me._parser['on' + ev] = function () { - var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + var args = + arguments.length === 1 ? + [arguments[0]] + : Array.apply(null, arguments) args.splice(0, 0, ev) me.emit.apply(me, args) } @@ -30577,30 +30636,34 @@ function runParallel (tasks, cb) { // without a significant breaking change to either this parser, or the // JavaScript language. Implementation of an emoji-capable xml parser // is left as an exercise for the reader. 
- var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var nameStart = + /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ - var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + var nameBody = + /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ - var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ - var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + var entityStart = + /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = + /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ - function isWhitespace (c) { + function isWhitespace(c) { return c === ' ' || c === '\n' || c === '\r' || c === '\t' } - function isQuote (c) { - return c === '"' || c === '\'' + function isQuote(c) { + return c === '"' || c === "'" } - function isAttribEnd (c) { + function isAttribEnd(c) { return c === '>' || isWhitespace(c) } - function isMatch (regex, c) { + function isMatch(regex, c) { return regex.test(c) } - function notMatch (regex, c) { + function notMatch(regex, c) { return !isMatch(regex, c) } @@ -30641,271 +30704,271 @@ function runParallel (tasks, cb) { CLOSE_TAG: S++, // SCRIPT: S++, //