Compare commits

..

15 Commits

Author SHA1 Message Date
Jozef Izso
f24c625f56 Create tests for sample JUnit files
Source: https://github.com/testmoapp/junitxml/
2025-12-27 00:05:09 +01:00
Jozef Izso
ee446707ff Merge pull request #692 from dorny/release/v2.3.0 2025-11-30 01:52:48 +01:00
Jozef Izso
fe45e95373 test-reporter release v2.3.0 2025-11-30 01:49:30 +01:00
Jozef Izso
e40a1da745 Merge pull request #682 from dorny/dependabot/npm_and_yarn/reports/mocha/multi-f14266366f 2025-11-30 01:01:42 +01:00
dependabot[bot]
3445860437 Bump js-yaml and mocha in /reports/mocha
Bumps [js-yaml](https://github.com/nodeca/js-yaml) to 4.1.1 and updates ancestor dependency [mocha](https://github.com/mochajs/mocha). These dependencies need to be updated together.


Updates `js-yaml` from 4.0.0 to 4.1.1
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/4.0.0...4.1.1)

Updates `mocha` from 8.3.0 to 11.7.5
- [Release notes](https://github.com/mochajs/mocha/releases)
- [Changelog](https://github.com/mochajs/mocha/blob/v11.7.5/CHANGELOG.md)
- [Commits](https://github.com/mochajs/mocha/compare/v8.3.0...v11.7.5)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 4.1.1
  dependency-type: indirect
- dependency-name: mocha
  dependency-version: 11.7.5
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-29 23:45:48 +00:00
Jozef Izso
9ef5c136b2 Merge pull request #691 from dorny/fix/complete-documentation 2025-11-30 00:40:18 +01:00
Jozef Izso
83e20c1534 Merge pull request #685 from dorny/dependabot/npm_and_yarn/reports/jest/js-yaml-3.14.2 2025-11-30 00:37:29 +01:00
Jozef Izso
4331a3b620 Clarify the dotnet-nunit docs to require NUnit3TestAdapter for nunit logger 2025-11-23 15:26:03 +01:00
Jozef Izso
04232af26f Complete documentation for all supported reporters
This commit addresses several documentation gaps to ensure all implemented
reporters are properly documented across action.yml and README.md.

Changes:
1. Updated action.yml description to include all supported languages:
   - Added: Go, Python (pytest, unittest), Ruby (RSpec), Swift

2. Added Ruby/RSpec to supported languages list in README.md

3. Added detailed documentation sections in README.md:
   - dotnet-nunit: Added section with NUnit3 XML format instructions
   - rspec-json: Added section with RSpec JSON formatter configuration

All reporters now have:
- Entry in action.yml description
- Entry in README supported languages list
- Entry in README usage documentation (reporter input)
- Detailed documentation section in README "Supported formats"
- Implementation in src/main.ts
- Tests in __tests__/

This ensures users can discover and use all available reporters without
confusion about what is supported.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-22 18:05:33 +01:00
Jozef Izso
cf146f4036 Merge pull request #690 from dorny/fix/add-golang-json-to-action-yml 2025-11-22 17:50:03 +01:00
Jozef Izso
33fc27cf09 Merge pull request #687 from dorny/dependabot/github_actions/actions/checkout-6 2025-11-22 17:49:02 +01:00
dependabot[bot]
fc80cb4400 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-21 23:07:16 +00:00
dependabot[bot]
79ea6a9d0e Bump js-yaml from 3.14.0 to 3.14.2 in /reports/jest
Bumps [js-yaml](https://github.com/nodeca/js-yaml) from 3.14.0 to 3.14.2.
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/3.14.0...3.14.2)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 3.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-18 19:47:48 +00:00
Jozef Izso
aef3d726a6 Merge pull request #683 from micmarc/feature/python-pytest 2025-11-15 18:19:24 +01:00
Michael Marcus
c1a56edcfe Enhance pytest support
Add robust test schema for pytest report
Update README with sample pytest command
2025-11-15 11:55:41 -05:00
23 changed files with 4643 additions and 2311 deletions

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Set Node.js
uses: actions/setup-node@v6

View File

@@ -13,7 +13,7 @@ jobs:
name: Build & Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
node-version-file: '.nvmrc'

View File

@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- run: npm ci
- run: npm run build
- run: npm test

View File

@@ -11,7 +11,7 @@ jobs:
name: Workflow test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: ./
with:
artifact: test-results

View File

@@ -1,5 +1,12 @@
# Changelog
## 2.3.0
* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
## 2.2.0
* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
* Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672

View File

@@ -20,6 +20,7 @@ This [Github Action](https://github.com/features/actions) displays test results
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit
For more information see the [Supported formats](#supported-formats) section.
@@ -256,6 +257,20 @@ Supported testing frameworks:
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
<details>
<summary>dotnet-nunit</summary>
Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
`dotnet test --logger "nunit;LogFileName=test-results.xml"`
Supported testing frameworks:
- [NUnit](https://nunit.org/)
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
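For context, a minimal workflow sketch showing how the NUnit results could feed this action; the `dorny/test-reporter@v2` reference and the `name`/`path`/`reporter` inputs are assumptions based on the action's usual usage, not part of this diff:
```yaml
# Sketch only: the dorny/test-reporter reference and its inputs are assumed.
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: dotnet test --logger "nunit;LogFileName=test-results.xml"
      - uses: dorny/test-reporter@v2   # assumed v2 tag for the 2.3.0 release line
        if: always()                   # publish the report even when tests fail
        with:
          name: NUnit tests
          path: '**/test-results.xml'  # NUnit3 XML written by the nunit logger
          reporter: dotnet-nunit
```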
<details>
<summary>flutter-json</summary>
@@ -357,9 +372,34 @@ Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/release
Support for Python test results in xUnit format is experimental; it should work, but it has not been extensively tested.
For pytest support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
For unittest support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
```shell
pytest --junit-xml=test-report.xml
```
For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
</details>
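As a companion to the command above, a hedged workflow sketch wiring the pytest JUnit XML into the `python-xunit` reporter; only the `pytest --junit-xml` command comes from this section, while the action reference and input names are assumptions:
```yaml
# Sketch only: the dorny/test-reporter reference and its inputs are assumed.
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: pip install pytest
      - run: pytest --junit-xml=test-report.xml
      - uses: dorny/test-reporter@v2
        if: always()
        with:
          name: pytest results
          path: test-report.xml
          reporter: python-xunit
```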
<details>
<summary>rspec-json</summary>
[RSpec](https://rspec.info/) testing framework support requires the use of the RSpec JSON formatter.
You can configure RSpec to output JSON by using the `--format json` option and writing the output to a file:
```shell
rspec --format json --out rspec-results.json
```
Or configure it in the `.rspec` file:
```
--format json
--out rspec-results.json
```
For more information see:
- [RSpec documentation](https://rspec.info/)
- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
</details>
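Similarly, a hedged sketch for publishing the RSpec JSON output; the `rspec --format json --out` invocation matches the section above, while the action reference and inputs are again assumptions:
```yaml
# Sketch only: the dorny/test-reporter reference and its inputs are assumed.
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: bundle exec rspec --format json --out rspec-results.json
      - uses: dorny/test-reporter@v2
        if: always()
        with:
          name: RSpec results
          path: rspec-results.json
          reporter: rspec-json
```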
<details>

View File

@@ -0,0 +1,23 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -0,0 +1,22 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
AssertionError: Expected value did not match.
❌ testCase6
ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```

View File

@@ -0,0 +1,26 @@
![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```

View File

@@ -6878,3 +6878,153 @@ at java.lang.Thread.run(Thread.java:748)
"totalTime": 2126531.0000000005,
}
`;
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-complete.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2436,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1534,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 822,
},
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Expected value did not match.",
"path": undefined,
},
"name": "testCase5",
"result": "failed",
"time": 2902.412,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "ArithmeticError: Division by zero.",
"path": undefined,
},
"name": "testCase6",
"result": "failed",
"time": 3819,
},
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2944,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1625.275,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 16082.687,
},
],
"totalTime": 16082.687,
}
`;

View File

@@ -1,5 +1,110 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-pytest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "tests.test_lib",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 5,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param1]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param2]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_always_fail():
> assert False
E assert False
tests/test_lib.py:25: AssertionError
",
"line": undefined,
"message": "assert False",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_error():
> raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_record_property",
"result": "success",
"time": 0,
},
],
},
TestGroupResult {
"name": "custom_classname",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_with_record_xml_attribute",
"result": "success",
"time": 0,
},
],
},
],
"name": "pytest",
"totalTime": 19,
},
],
"totalTime": undefined,
}
`;
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-unittest.xml",

View File

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the overall structure.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
<!-- Java JUnit 4 XML files do not nest <testsuite> elements -->
<!--
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
-->
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,141 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a JUnit-style XML example with commonly used tags and attributes.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
the <testsuites> element if there is only a single top-level <testsuite> element (which
is then used as the root element).
name Name of the entire test run
tests Total number of tests in this file
failures Total number of failed tests in this file
errors Total number of errored tests in this file
skipped Total number of skipped tests in this file
assertions Total number of assertions for all tests in this file
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test run was executed (in ISO 8601 format)
-->
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
There can be many test suites in an XML file, and there can be test suites under other
test suites.
name Name of the test suite (e.g. class name or folder name)
tests Total number of tests in this suite
failures Total number of failed tests in this suite
errors Total number of errored tests in this suite
skipped Total number of skipped tests in this suite
assertions Total number of assertions for all tests in this suite
time Aggregated time of all tests in this suite in seconds
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
file Source code file of this test suite
-->
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
file="tests/registration.code">
<!-- <properties> Test suites (and test cases, see below) can have additional
properties such as environment variables or version numbers. -->
<properties>
<!-- <property> Each property has a name and value. Some tools also support
properties with text values instead of value attributes. -->
<property name="version" value="1.774" />
<property name="commit" value="ef7bebf" />
<property name="browser" value="Google Chrome" />
<property name="ci" value="https://github.com/actions/runs/1234" />
<property name="config">
Config line #1
Config line #2
Config line #3
</property>
</properties>
<!-- <system-out> Optionally data written to standard out for the suite.
Also supported on a test case level, see below. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optionally data written to standard error for the suite.
Also supported on a test case level, see below. -->
<system-err>Data written to standard error.</system-err>
<!-- <testcase> There are one or more test cases in a test suite. A test passed
if there isn't an additional result element (skipped, failure, error).
name The name of this test case, often the method name
classname The name of the parent class/folder, often the same as the suite's name
assertions Number of assertions checked during test case execution
time Execution time of the test in seconds
file Source code file of this test case
line Source code line number of the start of this test case
-->
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
time="2.436" file="tests/registration.code" line="24" />
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
time="1.534" file="tests/registration.code" line="62" />
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
time="0.822" file="tests/registration.code" line="102" />
<!-- Example of a test case that was skipped -->
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
time="0" file="tests/registration.code" line="164">
<!-- <skipped> Indicates that the test was not executed. Can have an optional
message describing why the test was skipped. -->
<skipped message="Test was skipped." />
</testcase>
<!-- Example of a test case that failed. -->
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
time="2.902412" file="tests/registration.code" line="202">
<!-- <failure> The test failed because one of the assertions/checks failed.
Can have a message and failure type, often the assertion type or class. The text
content of the element often includes the failure description or stack trace. -->
<failure message="Expected value did not match." type="AssertionError">
<!-- Failure description or stack trace -->
</failure>
</testcase>
<!-- Example of a test case that had errors. -->
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
time="3.819" file="tests/registration.code" line="235">
<!-- <error> The test had an unexpected error during execution. Can have a
message and error type, often the exception type or class. The text
content of the element often includes the error description or stack trace. -->
<error message="Division by zero." type="ArithmeticError">
<!-- Error description or stack trace -->
</error>
</testcase>
<!-- Example of a test case with outputs. -->
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
time="2.944" file="tests/registration.code" line="287">
<!-- <system-out> Optional data written to standard out for the test case. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optional data written to standard error for the test case. -->
<system-err>Data written to standard error.</system-err>
</testcase>
<!-- Example of a test case with properties -->
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
time="1.625275" file="tests/registration.code" line="302">
<!-- <properties> Some tools also support properties for test cases. -->
<properties>
<property name="priority" value="high" />
<property name="language" value="english" />
<property name="author" value="Adrian" />
<property name="attachment" value="screenshots/dashboard.png" />
<property name="attachment" value="screenshots/users.png" />
<property name="description">
This text describes the purpose of this test case and provides
an overview of what the test does and how it works.
</property>
</properties>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
&gt; assert False
E assert False
tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
&gt; raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>

View File

@@ -73,6 +73,46 @@ describe('java-junit tests', () => {
fs.writeFileSync(outputPath, report)
})
it('report from testmo/junitxml basic example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report from testmo/junitxml complete example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('parses empty failures in test results', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))

View File

@@ -15,9 +15,9 @@ describe('python-xunit unittest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')
it('report from python test results matches snapshot', async () => {
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
@@ -68,3 +68,26 @@ describe('python-xunit unittest report', () => {
expect(report).toMatch(/^# My Custom Title\n/)
})
})
describe('python-xunit pytest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

View File

@@ -1,6 +1,5 @@
name: Test Reporter
description: |
Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
description: Displays test results from popular testing frameworks directly in GitHub
author: Michal Dorner <dorner.michal@gmail.com>
inputs:
artifact:

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.11.1",

View File

@@ -1,6 +1,6 @@
{
"name": "test-reporter",
"version": "2.2.0",
"version": "2.3.0",
"private": true,
"description": "Presents test results from popular testing frameworks as Github check run",
"main": "lib/main.js",

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -9,6 +9,6 @@
"author": "Michal Dorner <dorner.michal@gmail.com>",
"license": "MIT",
"devDependencies": {
"mocha": "^8.3.0"
"mocha": "^11.7.5"
}
}