mirror of https://github.com/dorny/test-reporter.git
synced 2026-02-04 05:27:55 +01:00

Merge 74f763ce28 into a810f9bf83
Commit 6af63810a7

6 changed files with 197 additions and 9 deletions
@@ -184,6 +184,12 @@ jobs:
         # none
         list-tests: 'all'
 
+        # Limits which test result files are listed:
+        # all
+        # failed
+        # none
+        list-files: 'all'
+
         # Limits number of created annotations with error message and stack trace captured during test execution.
         # Must be less or equal to 50.
         max-annotations: '10'
@@ -1,4 +1,5 @@
-import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'
+import {DEFAULT_OPTIONS, getBadge, getReport, ReportOptions} from '../../src/report/get-report'
+import {TestCaseResult, TestGroupResult, TestRunResult, TestSuiteResult} from '../../src/test-results'
 
 describe('getBadge', () => {
   describe('URI encoding with special characters', () => {
@@ -118,3 +119,144 @@ describe('getBadge', () => {
   })
 })
+
+describe('getReport', () => {
+  // Helper function to create test results
+  function createTestResult(path: string, passed: number, failed: number, skipped: number): TestRunResult {
+    const tests: TestCaseResult[] = []
+    for (let i = 0; i < passed; i++) {
+      tests.push(new TestCaseResult(`passed-test-${i}`, 'success', 100))
+    }
+    for (let i = 0; i < failed; i++) {
+      tests.push(new TestCaseResult(`failed-test-${i}`, 'failed', 100, {
+        details: 'Test failed',
+        message: 'Assertion error'
+      }))
+    }
+    for (let i = 0; i < skipped; i++) {
+      tests.push(new TestCaseResult(`skipped-test-${i}`, 'skipped', 0))
+    }
+
+    const group = new TestGroupResult('test-group', tests)
+    const suite = new TestSuiteResult('test-suite', [group])
+    return new TestRunResult(path, [suite])
+  }
+
+  describe('list-files parameter', () => {
+    const results = [
+      createTestResult('passing-file.spec.ts', 5, 0, 0),
+      createTestResult('failing-file.spec.ts', 3, 2, 1),
+      createTestResult('passing-with-skipped-file.spec.ts', 8, 0, 2)
+    ]
+
+    it('shows all files when list-files is "all"', () => {
+      const report = getReport(results, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'all',
+        listSuites: 'none',
+        listTests: 'none'
+      })
+
+      expect(report).toContain('passing-file.spec.ts')
+      expect(report).toContain('failing-file.spec.ts')
+      expect(report).toContain('passing-with-skipped-file.spec.ts')
+    })
+
+    it('shows only failed files when list-files is "failed"', () => {
+      const report = getReport(results, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'failed',
+        listSuites: 'none',
+        listTests: 'none'
+      })
+
+      expect(report).not.toContain('passing-file.spec.ts')
+      expect(report).toContain('failing-file.spec.ts')
+      expect(report).not.toContain('passing-with-skipped-file.spec.ts')
+    })
+
+    it('shows no file details when list-files is "none"', () => {
+      const report = getReport(results, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'none',
+        listSuites: 'none',
+        listTests: 'none'
+      })
+
+      // Should still have badge
+      expect(report).toContain('![')
+      // Should not have file names in detail sections
+      expect(report).not.toContain('passing-file.spec.ts')
+      expect(report).not.toContain('failing-file.spec.ts')
+      expect(report).not.toContain('passing-with-skipped-file.spec.ts')
+    })
+
+    it('includes summary table even with list-files "none"', () => {
+      const report = getReport(results, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'none',
+        listSuites: 'all',
+        listTests: 'none'
+      })
+
+      // Badge should still be present
+      expect(report).toContain('![')
+      expect(report).toContain('badge')
+      // File names should not be present
+      expect(report).not.toContain('passing-file.spec.ts')
+      expect(report).not.toContain('failing-file.spec.ts')
+      expect(report).not.toContain('passing-with-skipped-file.spec.ts')
+    })
+
+    it('works correctly with list-suites and list-tests when list-files is "failed"', () => {
+      const report = getReport(results, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'failed',
+        listSuites: 'all',
+        listTests: 'all'
+      })
+
+      expect(report).not.toContain('passing-file.spec.ts')
+      expect(report).toContain('failing-file.spec.ts')
+      expect(report).not.toContain('passing-with-skipped-file.spec.ts')
+      // Should show suite details for the failed file
+      expect(report).toContain('test-suite')
+    })
+
+    it('filters correctly when all files pass and list-files is "failed"', () => {
+      const allPassingResults = [
+        createTestResult('passing-file-1.spec.ts', 5, 0, 0),
+        createTestResult('passing-file-2.spec.ts', 8, 0, 2)
+      ]
+
+      const report = getReport(allPassingResults, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'failed',
+        listSuites: 'all',
+        listTests: 'none'
+      })
+
+      expect(report).not.toContain('passing-file-1.spec.ts')
+      expect(report).not.toContain('passing-file-2.spec.ts')
+      // Badge should still be present
+      expect(report).toContain('![')
+      expect(report).toContain('badge')
+    })
+
+    it('filters correctly when all files fail and list-files is "failed"', () => {
+      const allFailingResults = [
+        createTestResult('failing-file-1.spec.ts', 0, 5, 0),
+        createTestResult('failing-file-2.spec.ts', 1, 2, 1)
+      ]
+
+      const report = getReport(allFailingResults, {
+        ...DEFAULT_OPTIONS,
+        listFiles: 'failed',
+        listSuites: 'all',
+        listTests: 'none'
+      })
+
+      expect(report).toContain('failing-file-1.spec.ts')
+      expect(report).toContain('failing-file-2.spec.ts')
+    })
+  })
+})
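The new tests above exercise getReport only through Jest. As a rough illustration of the same API outside a test runner, here is a hedged sketch (it assumes the import paths used in the test file and the constructor signatures shown above; it is not part of the commit):

import {DEFAULT_OPTIONS, getReport} from '../../src/report/get-report'
import {TestCaseResult, TestGroupResult, TestRunResult, TestSuiteResult} from '../../src/test-results'

// One passing and one failing file, mirroring the createTestResult helper above.
const passing = new TestRunResult('passing-file.spec.ts', [
  new TestSuiteResult('test-suite', [
    new TestGroupResult('test-group', [new TestCaseResult('passed-test-0', 'success', 100)])
  ])
])
const failing = new TestRunResult('failing-file.spec.ts', [
  new TestSuiteResult('test-suite', [
    new TestGroupResult('test-group', [
      new TestCaseResult('failed-test-0', 'failed', 100, {details: 'Test failed', message: 'Assertion error'})
    ])
  ])
])

// With listFiles: 'failed' the generated Markdown should mention only failing-file.spec.ts.
const markdown = getReport([passing, failing], {
  ...DEFAULT_OPTIONS,
  listFiles: 'failed',
  listSuites: 'none',
  listTests: 'none'
})
console.log(markdown)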
@@ -54,6 +54,14 @@ inputs:
       - none
     required: false
     default: 'all'
+  list-files:
+    description: |
+      Limits which test result files are listed. Supported options:
+      - all
+      - failed
+      - none
+    required: false
+    default: 'all'
   max-annotations:
     description: |
       Limits number of created annotations with error message and stack trace captured during test execution.
dist/index.js (generated, vendored, 22 lines changed)

@@ -304,6 +304,7 @@ class TestReporter {
     reporter = core.getInput('reporter', { required: true });
     listSuites = core.getInput('list-suites', { required: true });
     listTests = core.getInput('list-tests', { required: true });
+    listFiles = core.getInput('list-files', { required: true });
     maxAnnotations = parseInt(core.getInput('max-annotations', { required: true }));
     failOnError = core.getInput('fail-on-error', { required: true }) === 'true';
     failOnEmpty = core.getInput('fail-on-empty', { required: true }) === 'true';
@@ -326,6 +327,10 @@ class TestReporter {
             core.setFailed(`Input parameter 'list-tests' has invalid value`);
             return;
         }
+        if (this.listFiles !== 'all' && this.listFiles !== 'failed' && this.listFiles !== 'none') {
+            core.setFailed(`Input parameter 'list-files' has invalid value`);
+            return;
+        }
         if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
             core.setFailed(`Input parameter 'collapsed' has invalid value`);
             return;
@@ -409,7 +414,7 @@ class TestReporter {
                 throw error;
             }
         }
-        const { listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed } = this;
+        const { listSuites, listTests, listFiles, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed } = this;
         const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
         const failed = results.reduce((sum, tr) => sum + tr.failed, 0);
         const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
@@ -419,6 +424,7 @@ class TestReporter {
         const summary = (0, get_report_1.getReport)(results, {
             listSuites,
             listTests,
+            listFiles,
             baseUrl,
             onlySummary,
             useActionsSummary,
@@ -447,6 +453,7 @@ class TestReporter {
         const summary = (0, get_report_1.getReport)(results, {
             listSuites,
             listTests,
+            listFiles,
             baseUrl,
             onlySummary,
             useActionsSummary,
@@ -2446,6 +2453,7 @@ const MAX_ACTIONS_SUMMARY_LENGTH = 1048576;
 exports.DEFAULT_OPTIONS = {
     listSuites: 'all',
     listTests: 'all',
+    listFiles: 'all',
     baseUrl: '',
     onlySummary: false,
     useActionsSummary: true,
@@ -2566,8 +2574,14 @@ function getTestRunsReport(testRuns, options) {
         sections.push(`<details><summary>Expand for details</summary>`);
         sections.push(` `);
     }
-    if (testRuns.length > 0 || options.onlySummary) {
-        const tableData = testRuns
+    // Filter test runs based on list-files option
+    const filteredTestRuns = options.listFiles === 'failed'
+        ? testRuns.filter(tr => tr.result === 'failed')
+        : options.listFiles === 'none'
+            ? []
+            : testRuns;
+    if (filteredTestRuns.length > 0 || options.onlySummary) {
+        const tableData = filteredTestRuns
             .map((tr, originalIndex) => ({ tr, originalIndex }))
             .filter(({ tr }) => tr.passed > 0 || tr.failed > 0 || tr.skipped > 0)
            .map(({ tr, originalIndex }) => {
@@ -2584,7 +2598,7 @@ function getTestRunsReport(testRuns, options) {
         sections.push(resultsTable);
     }
     if (options.onlySummary === false) {
-        const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();
+        const suitesReports = filteredTestRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat();
         sections.push(...suitesReports);
     }
     if (shouldCollapse) {
src/main.ts (10 lines changed)

@@ -43,6 +43,7 @@ class TestReporter {
   readonly reporter = core.getInput('reporter', {required: true})
   readonly listSuites = core.getInput('list-suites', {required: true}) as 'all' | 'failed' | 'none'
   readonly listTests = core.getInput('list-tests', {required: true}) as 'all' | 'failed' | 'none'
+  readonly listFiles = core.getInput('list-files', {required: true}) as 'all' | 'failed' | 'none'
   readonly maxAnnotations = parseInt(core.getInput('max-annotations', {required: true}))
   readonly failOnError = core.getInput('fail-on-error', {required: true}) === 'true'
   readonly failOnEmpty = core.getInput('fail-on-empty', {required: true}) === 'true'
@@ -69,6 +70,11 @@ class TestReporter {
       return
     }
 
+    if (this.listFiles !== 'all' && this.listFiles !== 'failed' && this.listFiles !== 'none') {
+      core.setFailed(`Input parameter 'list-files' has invalid value`)
+      return
+    }
+
     if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
       core.setFailed(`Input parameter 'collapsed' has invalid value`)
       return
@@ -174,7 +180,7 @@ class TestReporter {
       }
     }
 
-    const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed} = this
+    const {listSuites, listTests, listFiles, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed} = this
 
     const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
     const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
@@ -188,6 +194,7 @@ class TestReporter {
         {
           listSuites,
           listTests,
+          listFiles,
          baseUrl,
           onlySummary,
           useActionsSummary,
@@ -219,6 +226,7 @@ class TestReporter {
       const summary = getReport(results, {
         listSuites,
         listTests,
+        listFiles,
         baseUrl,
         onlySummary,
         useActionsSummary,
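The check added to main.ts repeats the literal-union validation already used for list-suites and list-tests. A possible follow-up, not part of this commit, would be a small generic reader; a hedged sketch, where getEnumInput is a hypothetical helper built only on @actions/core:

import * as core from '@actions/core'

// Hypothetical helper, not in the repository: read an input and narrow it to an allowed set,
// failing the run (as the existing checks do) when the value is not recognised.
function getEnumInput<T extends string>(name: string, allowed: readonly T[]): T | undefined {
  const value = core.getInput(name, {required: true})
  if ((allowed as readonly string[]).includes(value)) {
    return value as T
  }
  core.setFailed(`Input parameter '${name}' has invalid value`)
  return undefined
}

// Usage sketch:
const listFiles = getEnumInput('list-files', ['all', 'failed', 'none'] as const)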
@@ -11,6 +11,7 @@ const MAX_ACTIONS_SUMMARY_LENGTH = 1048576
 export interface ReportOptions {
   listSuites: 'all' | 'failed' | 'none'
   listTests: 'all' | 'failed' | 'none'
+  listFiles: 'all' | 'failed' | 'none'
   baseUrl: string
   onlySummary: boolean
   useActionsSummary: boolean
@@ -22,6 +23,7 @@ export interface ReportOptions {
 export const DEFAULT_OPTIONS: ReportOptions = {
   listSuites: 'all',
   listTests: 'all',
+  listFiles: 'all',
   baseUrl: '',
   onlySummary: false,
   useActionsSummary: true,
@@ -171,8 +173,16 @@ function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): s
     sections.push(` `)
   }
 
-  if (testRuns.length > 0 || options.onlySummary) {
-    const tableData = testRuns
+  // Filter test runs based on list-files option
+  const filteredTestRuns =
+    options.listFiles === 'failed'
+      ? testRuns.filter(tr => tr.result === 'failed')
+      : options.listFiles === 'none'
+        ? []
+        : testRuns
+
+  if (filteredTestRuns.length > 0 || options.onlySummary) {
+    const tableData = filteredTestRuns
       .map((tr, originalIndex) => ({tr, originalIndex}))
       .filter(({tr}) => tr.passed > 0 || tr.failed > 0 || tr.skipped > 0)
       .map(({tr, originalIndex}) => {
@@ -195,7 +205,7 @@ function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): s
   }
 
   if (options.onlySummary === false) {
-    const suitesReports = testRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat()
+    const suitesReports = filteredTestRuns.map((tr, i) => getSuitesReport(tr, i, options)).flat()
     sections.push(...suitesReports)
   }
 
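The filter added to getTestRunsReport is a single chained ternary. For readers tracing the behaviour, an equivalent standalone sketch (filterTestRuns is a hypothetical name; the logic mirrors the block above):

import {TestRunResult} from '../test-results'

// 'failed' keeps only runs whose overall result is 'failed', 'none' drops all
// file-level detail, and 'all' leaves the list untouched.
function filterTestRuns(testRuns: TestRunResult[], listFiles: 'all' | 'failed' | 'none'): TestRunResult[] {
  switch (listFiles) {
    case 'failed':
      return testRuns.filter(tr => tr.result === 'failed')
    case 'none':
      return []
    default:
      return testRuns
  }
}

Because both the results table and the per-suite sections are built from the same filteredTestRuns array, only the listed files shrink; the badge and the overall pass/fail/skip totals are computed from all results and stay unchanged, which matches what the new tests assert.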