Mirror of https://github.com/dorny/test-reporter.git (synced 2025-12-15 22:07:09 +01:00)
Merge pull request #60 from dorny/reduce-report-size
Set listTests and listSuites to lower detail if report is too big
Commit 85275e49c9
6 changed files with 113 additions and 44 deletions
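In outline, `getReport` now renders the summary first, measures its UTF-8 size, and lowers `listTests` and then `listSuites` step by step until the output fits under the 65535-byte cap; if nothing fits, only the badge and a short notice are returned. A condensed, self-contained sketch of that strategy (option names follow the diffs below; the renderer itself is stubbed out here):

```ts
// Standalone sketch of the size-capping strategy introduced by this PR.
// `render` stands in for renderReport(results, opts) from the diffs below.
type ListSuites = 'all' | 'failed'
type ListTests = 'all' | 'failed' | 'none'

interface Options {
  listSuites: ListSuites
  listTests: ListTests
}

const MAX_BYTES = 65535

// Size is measured in UTF-8 bytes, matching getByteLength in the diff.
function byteLength(text: string): number {
  return Buffer.byteLength(text, 'utf8')
}

function capReport(render: (opts: Options) => string, options: Options): string {
  const opts = {...options} // copy, so the caller's options are not mutated
  let report = render(opts)
  if (byteLength(report) <= MAX_BYTES) return report

  // Drop detail step by step until the report fits.
  if (opts.listTests === 'all') {
    opts.listTests = 'failed'
    report = render(opts)
    if (byteLength(report) <= MAX_BYTES) return report
  }
  if (opts.listSuites === 'all') {
    opts.listSuites = 'failed'
    report = render(opts)
    if (byteLength(report) <= MAX_BYTES) return report
  }
  if (opts.listTests !== 'none') {
    opts.listTests = 'none'
    report = render(opts)
    if (byteLength(report) <= MAX_BYTES) return report
  }

  // Still too big: give up on the listing and return a short notice instead.
  return `**Test report summary exceeded limit of ${MAX_BYTES} bytes and was removed**`
}

// Example: a renderer whose full-detail output is too large gets downgraded automatically.
const fake = (o: Options) => (o.listTests === 'all' ? 'x'.repeat(70000) : 'short summary')
console.log(capReport(fake, {listSuites: 'all', listTests: 'all'})) // prints "short summary"
```

The real implementation (`getReport`, `renderReport`, `getByteLength`) is in the source diff further down; the change to `dist/index.js` is the same logic in the compiled bundle.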
```diff
@@ -1,9 +1,12 @@
 # Changelog
 
+## v1.2.0
+- [Set `listTests` and `listSuites` to lower detail if report is too big](https://github.com/dorny/test-reporter/pull/60)
+
 ## v1.1.0
 - [Support public repo PR workflow](https://github.com/dorny/test-reporter/pull/56)
 
-# v1.0.0
+## v1.0.0
 Supported languages / frameworks:
 - .NET / xUnit / NUnit / MSTest
 - Dart / test
```
```diff
@@ -43,7 +43,7 @@ describe('dotnet-trx tests', () => {
     const result = await parser.parse(filePath, fileContent)
     expect(result).toMatchSnapshot()
 
-    const report = getReport([result], {listTests: 'failed'})
+    const report = getReport([result])
     fs.mkdirSync(path.dirname(outputPath), {recursive: true})
     fs.writeFileSync(outputPath, report)
   })
```
```diff
@@ -46,7 +46,7 @@ describe('jest-junit tests', () => {
     const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()
 
-    const report = getReport([result], {listTests: 'failed'})
+    const report = getReport([result])
     fs.mkdirSync(path.dirname(outputPath), {recursive: true})
     fs.writeFileSync(outputPath, report)
   })
```
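With both `ReportOptions` fields now required and backed by `defaultOptions`, callers no longer pre-emptively lower detail; that is why the two tests above drop the explicit `{listTests: 'failed'}` argument. A minimal caller-side sketch (the import paths are assumptions for illustration, not taken from the diff):

```ts
import {getReport} from '../../src/report/get-report' // assumed path
import {TestRunResult} from '../../src/test-results'  // assumed path

declare const result: TestRunResult // a parsed result, e.g. from parser.parse(...)

// Defaults apply: full detail, automatically reduced only if the summary is too big.
const report = getReport([result])

// Forcing lower detail is still possible, but both fields must now be provided.
const compact = getReport([result], {listSuites: 'all', listTests: 'failed'})
```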
dist/index.js (generated, vendored): 65 changed lines
```diff
@@ -998,27 +998,47 @@ exports.getReport = void 0;
 const core = __importStar(__nccwpck_require__(2186));
 const markdown_utils_1 = __nccwpck_require__(6482);
 const slugger_1 = __nccwpck_require__(3328);
-function getReport(results, options = {}) {
+const defaultOptions = {
+    listSuites: 'all',
+    listTests: 'all'
+};
+function getReport(results, options = defaultOptions) {
     core.info('Generating check run summary');
     const maxReportLength = 65535;
-    const sections = [];
     applySort(results);
-    const badge = getReportBadge(results);
-    sections.push(badge);
-    const runs = getTestRunsReport(results, options);
-    sections.push(...runs);
-    const report = sections.join('\n');
-    if (report.length > maxReportLength) {
-        let msg = `**Check Run summary limit of ${maxReportLength} chars was exceed**`;
-        if (options.listTests !== 'all') {
-            msg += '\n- Consider setting `list-tests` option to `only-failed` or `none`';
-        }
-        if (options.listSuites !== 'all') {
-            msg += '\n- Consider setting `list-suites` option to `only-failed`';
-        }
-        return `${badge}\n${msg}`;
+    const opts = { ...options };
+    let report = renderReport(results, opts);
+    if (getByteLength(report) <= maxReportLength) {
+        return report;
     }
-    return report;
+    if (opts.listTests === 'all') {
+        core.info("Test report summary is too big - setting 'listTests' to 'failed'");
+        opts.listTests = 'failed';
+        report = renderReport(results, opts);
+        if (getByteLength(report) <= maxReportLength) {
+            return report;
+        }
+    }
+    if (opts.listSuites === 'all') {
+        core.info("Test report summary is too big - setting 'listSuites' to 'failed'");
+        opts.listSuites = 'failed';
+        report = renderReport(results, opts);
+        if (getByteLength(report) <= maxReportLength) {
+            return report;
+        }
+    }
+    if (opts.listTests !== 'none') {
+        core.info("Test report summary is too big - setting 'listTests' to 'none'");
+        opts.listTests = 'none';
+        report = renderReport(results, opts);
+        if (getByteLength(report) <= maxReportLength) {
+            return report;
+        }
+    }
+    core.warning(`Test report summary exceeded limit of ${maxReportLength} bytes`);
+    const badge = getReportBadge(results);
+    const msg = `**Test report summary exceeded limit of ${maxReportLength} bytes and was removed**`;
+    return `${badge}\n${msg}`;
 }
 exports.getReport = getReport;
 function applySort(results) {
@@ -1027,6 +1047,17 @@ function applySort(results) {
         res.suites.sort((a, b) => a.name.localeCompare(b.name));
     }
 }
+function getByteLength(text) {
+    return Buffer.byteLength(text, 'utf8');
+}
+function renderReport(results, options) {
+    const sections = [];
+    const badge = getReportBadge(results);
+    sections.push(badge);
+    const runs = getTestRunsReport(results, options);
+    sections.push(...runs);
+    return sections.join('\n');
+}
 function getReportBadge(results) {
     const passed = results.reduce((sum, tr) => sum + tr.passed, 0);
     const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0);
```
dist/index.js.map (generated, vendored): 2 changed lines
File diff suppressed because one or more lines are too long
```diff
@@ -4,38 +4,58 @@ import {Align, formatTime, Icon, link, table} from '../utils/markdown-utils'
 import {slug} from '../utils/slugger'
 
 export interface ReportOptions {
-  listSuites?: 'all' | 'failed'
-  listTests?: 'all' | 'failed' | 'none'
+  listSuites: 'all' | 'failed'
+  listTests: 'all' | 'failed' | 'none'
 }
 
-export function getReport(results: TestRunResult[], options: ReportOptions = {}): string {
+const defaultOptions: ReportOptions = {
+  listSuites: 'all',
+  listTests: 'all'
+}
+
+export function getReport(results: TestRunResult[], options: ReportOptions = defaultOptions): string {
   core.info('Generating check run summary')
 
   const maxReportLength = 65535
-  const sections: string[] = []
-
   applySort(results)
 
-  const badge = getReportBadge(results)
-  sections.push(badge)
-
-  const runs = getTestRunsReport(results, options)
-  sections.push(...runs)
-
-  const report = sections.join('\n')
-  if (report.length > maxReportLength) {
-    let msg = `**Check Run summary limit of ${maxReportLength} chars was exceed**`
-    if (options.listTests !== 'all') {
-      msg += '\n- Consider setting `list-tests` option to `only-failed` or `none`'
-    }
-    if (options.listSuites !== 'all') {
-      msg += '\n- Consider setting `list-suites` option to `only-failed`'
-    }
-
-    return `${badge}\n${msg}`
+  const opts = {...options}
+  let report = renderReport(results, opts)
+  if (getByteLength(report) <= maxReportLength) {
+    return report
   }
 
-  return report
+  if (opts.listTests === 'all') {
+    core.info("Test report summary is too big - setting 'listTests' to 'failed'")
+    opts.listTests = 'failed'
+    report = renderReport(results, opts)
+    if (getByteLength(report) <= maxReportLength) {
+      return report
+    }
+  }
+
+  if (opts.listSuites === 'all') {
+    core.info("Test report summary is too big - setting 'listSuites' to 'failed'")
+    opts.listSuites = 'failed'
+    report = renderReport(results, opts)
+    if (getByteLength(report) <= maxReportLength) {
+      return report
+    }
+  }
+
+  if (opts.listTests !== 'none') {
+    core.info("Test report summary is too big - setting 'listTests' to 'none'")
+    opts.listTests = 'none'
+    report = renderReport(results, opts)
+    if (getByteLength(report) <= maxReportLength) {
+      return report
+    }
+  }
+
+  core.warning(`Test report summary exceeded limit of ${maxReportLength} bytes`)
+  const badge = getReportBadge(results)
+  const msg = `**Test report summary exceeded limit of ${maxReportLength} bytes and was removed**`
+  return `${badge}\n${msg}`
 }
 
 function applySort(results: TestRunResult[]): void {
@@ -45,6 +65,21 @@ function applySort(results: TestRunResult[]): void {
   }
 }
 
+function getByteLength(text: string): number {
+  return Buffer.byteLength(text, 'utf8')
+}
+
+function renderReport(results: TestRunResult[], options: ReportOptions): string {
+  const sections: string[] = []
+  const badge = getReportBadge(results)
+  sections.push(badge)
+
+  const runs = getTestRunsReport(results, options)
+  sections.push(...runs)
+
+  return sections.join('\n')
+}
+
 function getReportBadge(results: TestRunResult[]): string {
   const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
   const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0)
```