Mirror of https://github.com/dorny/test-reporter.git, synced 2026-02-04 12:15:21 -08:00

Compare commits: fix/add-go ... fix/comple (7 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4331a3b620 | |
| | 04232af26f | |
| | cf146f4036 | |
| | 33fc27cf09 | |
| | fc80cb4400 | |
| | aef3d726a6 | |
| | c1a56edcfe | |
.github/workflows/check-dist.yml (vendored): 2 changes

@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6

       - name: Set Node.js
         uses: actions/setup-node@v6
.github/workflows/ci.yml (vendored): 2 changes

@@ -13,7 +13,7 @@ jobs:
     name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'
.github/workflows/manual-run.yml (vendored): 2 changes

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - run: npm ci
       - run: npm run build
       - run: npm test
.github/workflows/test-report.yml (vendored): 2 changes

@@ -11,7 +11,7 @@ jobs:
     name: Workflow test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: ./
         with:
           artifact: test-results
README.md: 44 changes

@@ -20,6 +20,7 @@ This [Github Action](https://github.com/features/actions) displays test results
 - Java / [JUnit](https://junit.org/)
 - JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
 - Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
+- Ruby / [RSpec](https://rspec.info/)
 - Swift / xUnit

 For more information see [Supported formats](#supported-formats) section.
@@ -256,6 +257,20 @@ Supported testing frameworks:
 For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
 </details>

+<details>
+<summary>dotnet-nunit</summary>
+
+Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
+Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
+
+`dotnet test --logger "nunit;LogFileName=test-results.xml"`
+
+Supported testing frameworks:
+- [NUnit](https://nunit.org/)
+
+For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
+</details>
+
 <details>
 <summary>flutter-json</summary>

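Usage note, not part of the diff: once the NUnit XML exists, the reporting step in a consuming workflow would plausibly look like the sketch below. The `dotnet-nunit` reporter id comes from the `<summary>` above; the step name and path glob are illustrative assumptions.

```yaml
# Sketch, assuming dorny/test-reporter@v1 and the file name used above:
# publish the NUnit3 XML produced by
# `dotnet test --logger "nunit;LogFileName=test-results.xml"`.
- uses: dorny/test-reporter@v1
  if: success() || failure()      # report results even when tests fail
  with:
    name: .NET NUnit tests        # illustrative check-run name
    path: '**/test-results.xml'   # illustrative glob for the NUnit XML
    reporter: dotnet-nunit        # reporter id from the <summary> above
```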
@@ -357,9 +372,34 @@ Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/release

 Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.

-For pytest support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
+For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.

-For unittest support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
+```shell
+pytest --junit-xml=test-report.xml
+```
+
+For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
+</details>
+
+<details>
+<summary>rspec-json</summary>
+
+[RSpec](https://rspec.info/) testing framework support requires the usage of JSON formatter.
+You can configure RSpec to output JSON format by using the `--format json` option and redirecting to a file:
+
+```shell
+rspec --format json --out rspec-results.json
+```
+
+Or configure it in `.rspec` file:
+```
+--format json
+--out rspec-results.json
+```
+
+For more information see:
+- [RSpec documentation](https://rspec.info/)
+- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
 </details>

 <details>
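Similarly, for the two reporters documented above, the reporting steps would plausibly look like the sketch below. The `python-xunit` id is inferred from the parser and fixture names elsewhere in this change set, `rspec-json` is the `<summary>` id above, and names and paths are illustrative.

```yaml
# Sketch, not from this PR: publish the pytest and RSpec results
# produced by the commands documented above.
- uses: dorny/test-reporter@v1
  if: success() || failure()      # report results even when tests fail
  with:
    name: pytest results
    path: test-report.xml         # from: pytest --junit-xml=test-report.xml
    reporter: python-xunit        # inferred reporter id for Python xUnit results

- uses: dorny/test-reporter@v1
  if: success() || failure()
  with:
    name: RSpec results
    path: rspec-results.json      # from: rspec --format json --out rspec-results.json
    reporter: rspec-json          # reporter id from the <summary> above
```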
__tests__/__outputs__/python-xunit-pytest.md (new file): 26 lines

@@ -0,0 +1,26 @@
+
+|Report|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
+## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
+**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
+|Test suite|Passed|Failed|Skipped|Time|
+|:---|---:|---:|---:|---:|
+|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
+### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
+```
+tests.test_lib
+✅ test_always_pass
+✅ test_with_subtests
+✅ test_parameterized[param1]
+✅ test_parameterized[param2]
+⚪ test_always_skip
+❌ test_always_fail
+	assert False
+⚪ test_expected_failure
+❌ test_error
+	Exception: error
+✅ test_with_record_property
+custom_classname
+✅ test_with_record_xml_attribute
+```
@@ -1,5 +1,110 @@
 // Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+
+exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
+TestRunResult {
+  "path": "fixtures/python-xunit-pytest.xml",
+  "suites": [
+    TestSuiteResult {
+      "groups": [
+        TestGroupResult {
+          "name": "tests.test_lib",
+          "tests": [
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_always_pass",
+              "result": "success",
+              "time": 2,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_with_subtests",
+              "result": "success",
+              "time": 5,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_parameterized[param1]",
+              "result": "success",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_parameterized[param2]",
+              "result": "success",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_always_skip",
+              "result": "skipped",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": {
+                "details": "def test_always_fail():
+>       assert False
+E       assert False
+
+tests/test_lib.py:25: AssertionError
+",
+                "line": undefined,
+                "message": "assert False",
+                "path": undefined,
+              },
+              "name": "test_always_fail",
+              "result": "failed",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_expected_failure",
+              "result": "skipped",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": {
+                "details": "def test_error():
+>       raise Exception("error")
+E       Exception: error
+
+tests/test_lib.py:32: Exception
+",
+                "line": undefined,
+                "message": "Exception: error",
+                "path": undefined,
+              },
+              "name": "test_error",
+              "result": "failed",
+              "time": 0,
+            },
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_with_record_property",
+              "result": "success",
+              "time": 0,
+            },
+          ],
+        },
+        TestGroupResult {
+          "name": "custom_classname",
+          "tests": [
+            TestCaseResult {
+              "error": undefined,
+              "name": "test_with_record_xml_attribute",
+              "result": "success",
+              "time": 0,
+            },
+          ],
+        },
+      ],
+      "name": "pytest",
+      "totalTime": 19,
+    },
+  ],
+  "totalTime": undefined,
+}
+`;
+
 exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
 TestRunResult {
   "path": "fixtures/python-xunit-unittest.xml",
__tests__/fixtures/python-xunit-pytest.xml (new file): 42 lines

@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="utf-8"?>
+<testsuites name="pytest tests">
+  <testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
+             timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
+    <properties>
+      <property name="custom_prop" value="custom_val"/>
+    </properties>
+    <testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
+    <testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
+    <testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
+    <testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
+    <testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
+      <skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
+      </skipped>
+    </testcase>
+    <testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
+      <failure message="assert False">def test_always_fail():
+>       assert False
+E       assert False
+
+tests/test_lib.py:25: AssertionError
+      </failure>
+    </testcase>
+    <testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
+      <skipped type="pytest.xfail" message=""/>
+    </testcase>
+    <testcase classname="tests.test_lib" name="test_error" time="0.000">
+      <failure message="Exception: error">def test_error():
+>       raise Exception("error")
+E       Exception: error
+
+tests/test_lib.py:32: Exception
+      </failure>
+    </testcase>
+    <testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
+      <properties>
+        <property name="example_key" value="1"/>
+      </properties>
+    </testcase>
+    <testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
+  </testsuite>
+</testsuites>
@@ -15,9 +15,9 @@ describe('python-xunit unittest report', () => {
   const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
   const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
   const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')

   it('report from python test results matches snapshot', async () => {
-    const outputPath = path.join(__dirname, '__outputs__', 'python-xunit.md')
     const trackedFiles = ['tests/test_lib.py']
     const opts: ParseOptions = {
       ...defaultOpts,

@@ -68,3 +68,26 @@ describe('python-xunit unittest report', () => {
     expect(report).toMatch(/^# My Custom Title\n/)
   })
 })
+
+describe('python-xunit pytest report', () => {
+  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
+  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
+  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
+  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
+
+  it('report from python test results matches snapshot', async () => {
+    const trackedFiles = ['tests/test_lib.py']
+    const opts: ParseOptions = {
+      ...defaultOpts,
+      trackedFiles
+    }
+
+    const parser = new PythonXunitParser(opts)
+    const result = await parser.parse(filePath, fileContent)
+    expect(result).toMatchSnapshot()
+
+    const report = getReport([result])
+    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
+    fs.writeFileSync(outputPath, report)
+  })
+})
action.yml

@@ -1,6 +1,6 @@
 name: Test Reporter
 description: |
-  Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
+  Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Go, Java (JUnit), JavaScript (JEST, Mocha), Python (pytest, unittest), Ruby (RSpec), Swift
 author: Michal Dorner <dorner.michal@gmail.com>
 inputs:
   artifact:
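Taken together, a minimal consumer workflow for the action this manifest describes might look like the following sketch. The `permissions` block, step layout, and `python-xunit` reporter id are assumptions about typical wiring, not part of this diff.

```yaml
# Sketch of a consumer workflow for dorny/test-reporter (illustrative only).
name: CI
on: [push]
permissions:
  checks: write                      # the action creates a check run with the results
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: pip install pytest
      - run: pytest --junit-xml=test-report.xml
      - uses: dorny/test-reporter@v1
        if: success() || failure()   # publish results even when tests fail
        with:
          name: pytest results
          path: test-report.xml
          reporter: python-xunit     # assumed id for the Python xUnit parser
```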