Compare commits


3 Commits

|Author|SHA1|Message|Date|
|:---|:---|:---|:---|
|Jozef Izso|3d4ef7027a|Use esbuild|2025-06-07 17:47:45 +02:00|
|Jozef Izso|671f839d7c|Use types arguments in the downloadStream event handlers (Issues #603)|2025-06-07 13:40:06 +02:00|
|Jozef Izso|57d1916a57|Use typed WorkflowRunEvent when parsing workflow_run payload (Issue #603)|2025-06-07 13:36:07 +02:00|
91 changed files with 51444 additions and 70692 deletions

View File

@@ -21,10 +21,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
       - name: Set Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
           node-version-file: '.nvmrc'
@@ -46,7 +46,7 @@ jobs:
         id: diff
       # If index.js was different than expected, upload the expected version as an artifact
-      - uses: actions/upload-artifact@v5
+      - uses: actions/upload-artifact@v4
         if: ${{ failure() && steps.diff.conclusion == 'failure' }}
         with:
          name: dist
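The `diff` step referenced above is not shown in this hunk. As a sketch (assumed, not taken from this compare), such a step typically rebuilds `dist/` and fails when the committed bundle differs from the fresh build:

```yaml
# Hypothetical sketch of the `id: diff` step referenced above.
- name: Compare the expected and actual dist/ directories
  id: diff
  run: |
    # Any difference between the committed and freshly built dist/ fails the job
    if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
      echo "Detected uncommitted changes after build."
      git diff
      exit 1
    fi
```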

View File

@@ -13,8 +13,8 @@ jobs:
     name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
-      - uses: actions/setup-node@v6
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
           node-version-file: '.nvmrc'
       - run: npm ci
@@ -25,7 +25,7 @@ jobs:
       - name: Upload test results
        if: ${{ !cancelled() }}
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
        with:
          name: test-results
          path: __tests__/__results__/*.xml

View File

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
       - run: npm ci
       - run: npm run build
       - run: npm test

View File

@@ -11,7 +11,7 @@ jobs:
     name: Workflow test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
       - uses: ./
         with:
           artifact: test-results
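The `uses: ./` step runs the action from the checked-out repository itself. For reference, a consumer workflow would invoke the published action instead; a minimal sketch assuming the inputs shown in the README diff further down (`artifact`, `name`, `path`, `reporter`):

```yaml
# Hypothetical consumer step; inputs mirror the README excerpt below.
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}    # report results even if the test step failed
  with:
    artifact: test-results   # artifact uploaded by the CI workflow
    name: Tests              # name of the check run to create
    path: '*.xml'            # glob matching result files inside the artifact
    reporter: jest-junit     # format of the result files
```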

View File

@@ -1,13 +0,0 @@
{
"blanks-around-headings": false,
"blanks-around-lists": false,
"blanks-around-tables": false,
"blanks-around-fences": false,
"no-bare-urls": false,
"line-length": false,
"ul-style": false,
"no-inline-html": false,
"no-multiple-blanks": {
"maximum": 3
}
}
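For reference only: markdownlint also accepts YAML configuration, so the deleted JSON above is equivalent to this illustrative `.markdownlint.yaml` (not part of the compare):

```yaml
# Illustrative YAML rendering of the deleted .markdownlint.json
blanks-around-headings: false
blanks-around-lists: false
blanks-around-tables: false
blanks-around-fences: false
no-bare-urls: false
line-length: false
ul-style: false
no-inline-html: false
no-multiple-blanks:
  maximum: 3   # allow at most three consecutive blank lines
```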

View File

@@ -1,30 +1,5 @@
 # Changelog
-## 2.3.0
-* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
-* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
-* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
-* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
-* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
-## 2.2.0
-* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
-* Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672
-* Fix missing `report-title` attribute in action definition https://github.com/dorny/test-reporter/pull/637
-* Refactor variable names to fix shadowing issues https://github.com/dorny/test-reporter/pull/630
-## 2.1.1
-* Fix error when a TestMethod element does not have a className attribute in a trx file https://github.com/dorny/test-reporter/pull/623
-* Add stack trace from trx to summary https://github.com/dorny/test-reporter/pull/615
-* List only failed tests https://github.com/dorny/test-reporter/pull/606
-* Add type definitions to `github-utils.ts` https://github.com/dorny/test-reporter/pull/604
-* Avoid split on undefined https://github.com/dorny/test-reporter/pull/258
-* Return links to summary report https://github.com/dorny/test-reporter/pull/588
-* Add step summary short summary https://github.com/dorny/test-reporter/pull/589
-* Fix for empty TRX TestDefinitions https://github.com/dorny/test-reporter/pull/582
-* Increase step summary limit to 1MiB https://github.com/dorny/test-reporter/pull/581
-* Fix input description for list options https://github.com/dorny/test-reporter/pull/572
 ## 2.1.0
 * Feature: Add summary title https://github.com/dorny/test-reporter/pull/568
 * Feature: Add Golang test parser https://github.com/dorny/test-reporter/pull/571

View File

@@ -9,7 +9,7 @@ This [Github Action](https://github.com/features/actions) displays test results
 ✔️ Provides final `conclusion` and counts of `passed`, `failed` and `skipped` tests as output parameters
 **How it looks:**
-|![Summary showing test run with all tests passed, including details such as test file names, number of passed, failed, and skipped tests, and execution times. The interface is dark-themed and displays a green badge indicating 3527 passed and 4 skipped tests.](assets/fluent-validation-report.png)|![Summary showing test run with a failed unit test. The summary uses a dark background and highlights errors in red for quick identification.](assets/provider-error-summary.png)|![GitHub Actions annotation showing details of a failed unit test with a detailed error message, stack trace, and code annotation.](assets/provider-error-details.png)|![Test cases written in Mocha framework with a list of expectations for each test case. The table format and color-coded badges help users quickly assess test suite health.](assets/mocha-groups.png)|
+|![](assets/fluent-validation-report.png)|![](assets/provider-error-summary.png)|![](assets/provider-error-details.png)|![](assets/mocha-groups.png)|
 |:--:|:--:|:--:|:--:|
 **Supported languages / frameworks:**
@@ -19,9 +19,6 @@ This [Github Action](https://github.com/features/actions) displays test results
 - Go / [go test](https://pkg.go.dev/testing)
 - Java / [JUnit](https://junit.org/)
 - JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
-- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
-- PHP / [PHPUnit](https://phpunit.de/)
-- Ruby / [RSpec](https://rspec.info/)
 - Swift / xUnit
 For more information see [Supported formats](#supported-formats) section.
@@ -148,10 +145,7 @@ jobs:
 # java-junit
 # jest-junit
 # mocha-json
-# phpunit-junit
-# python-xunit
 # rspec-json
-# swift-xunit
 reporter: ''
 # Allows you to generate only the summary.
@@ -259,20 +253,6 @@ Supported testing frameworks:
 For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
 </details>
-<details>
-<summary>dotnet-nunit</summary>
-Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
-Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
-`dotnet test --logger "nunit;LogFileName=test-results.xml"`
-Supported testing frameworks:
-- [NUnit](https://nunit.org/)
-For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
-</details>
 <details>
 <summary>flutter-json</summary>
@@ -316,14 +296,6 @@ This is due to the fact Java stack traces don't contain a full path to the sourc
 Some heuristic was necessary to figure out the mapping between the line in the stack trace and an actual source file.
 </details>
-<details>
-<summary>phpunit-junit</summary>
-[PHPUnit](https://phpunit.de/) can generate JUnit XML via CLI:
-`phpunit --log-junit reports/phpunit-junit.xml`
-</details>
 <details>
 <summary>jest-junit</summary>
@@ -377,41 +349,6 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
 Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
 </details>
-<details>
-<summary>python-xunit (Experimental)</summary>
-Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.
-For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
-```shell
-pytest --junit-xml=test-report.xml
-```
-For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
-</details>
-<details>
-<summary>rspec-json</summary>
-[RSpec](https://rspec.info/) testing framework support requires the usage of JSON formatter.
-You can configure RSpec to output JSON format by using the `--format json` option and redirecting to a file:
-```shell
-rspec --format json --out rspec-results.json
-```
-Or configure it in `.rspec` file:
-```
---format json
---out rspec-results.json
-```
-For more information see:
-- [RSpec documentation](https://rspec.info/)
-- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
-</details>
 <details>
 <summary>swift-xunit (Experimental)</summary>
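The removed python-xunit instructions above translate into a job like the following sketch (the pytest command and the `python-xunit` reporter value come from the quoted README text; the surrounding wiring is an assumption):

```yaml
# Hypothetical pytest job following the python-xunit instructions quoted above.
steps:
  - uses: actions/checkout@v4
  - run: pip install pytest
  - run: pytest --junit-xml=test-report.xml   # emit results as JUnit XML
  - uses: dorny/test-reporter@v2
    if: ${{ !cancelled() }}
    with:
      name: pytest results
      path: test-report.xml
      reporter: python-xunit
```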

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20passed%2C%204%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/dart-json.json](#user-content-r0)|1 ✅|4 ❌|1 ⚪|4s|
+|fixtures/dart-json.json|1 ✅|4 ❌|1 ⚪|4s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dart-json.json</a>
 **6** tests were completed in **4s** with **1** passed, **4** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-3%20passed%2C%205%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/dotnet-nunit.xml](#user-content-r0)|3 ✅|5 ❌|1 ⚪|230ms|
+|fixtures/dotnet-nunit.xml|3 ✅|5 ❌|1 ⚪|230ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-nunit.xml</a>
 **9** tests were completed in **230ms** with **3** passed, **5** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,34 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%205%20failed%2C%201%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-trx.trx](#user-content-r0)|5 ✅|5 ❌|1 ⚪|1s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-trx.trx</a>
**11** tests were completed in **1s** with **5** passed, **5** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitTests.CalculatorTests](#user-content-r0s0)|5 ✅|5 ❌|1 ⚪|118ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitTests.CalculatorTests</a>
```
❌ Exception_In_TargetTest
System.DivideByZeroException : Attempted to divide by zero.
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33
❌ Exception_In_Test
System.Exception : Test
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39
❌ Failing_Test
Assert.Equal() Failure
Expected: 3
Actual: 2
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27
❌ Is_Even_Number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59
❌ Should be even number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67
```

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%205%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/dotnet-trx.trx](#user-content-r0)|5 ✅|5 ❌|1 ⚪|1s|
+|fixtures/dotnet-trx.trx|5 ✅|5 ❌|1 ⚪|1s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-trx.trx</a>
 **11** tests were completed in **1s** with **5** passed, **5** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|
@@ -12,29 +12,23 @@
 ✅ Custom Name
 ❌ Exception_In_TargetTest
 System.DivideByZeroException : Attempted to divide by zero.
-at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9
-at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33
 ❌ Exception_In_Test
 System.Exception : Test
-at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39
 ❌ Failing_Test
 Assert.Equal() Failure
 Expected: 3
 Actual: 2
-at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27
 ✅ Is_Even_Number(i: 2)
 ❌ Is_Even_Number(i: 3)
 Assert.True() Failure
 Expected: True
 Actual: False
-at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59
 ✅ Passing_Test
 ✅ Should be even number(i: 2)
 ❌ Should be even number(i: 3)
 Assert.True() Failure
 Expected: True
 Actual: False
-at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67
 ⚪ Skipped_Test
 ✅ Timeout_Test
 ```

View File

@@ -1,26 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-1%20passed%2C%203%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-xunitv3.trx](#user-content-r0)|1 ✅|3 ❌||267ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-xunitv3.trx</a>
**4** tests were completed in **267ms** with **1** passed, **3** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitV3Tests.FixtureTests](#user-content-r0s0)|1 ✅|1 ❌||18ms|
|[Unclassified](#user-content-r0s1)||2 ❌||0ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitV3Tests.FixtureTests</a>
```
❌ Failing_Test
Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)
✅ Passing_Test
```
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">Unclassified</a>
```
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]
```

View File

@@ -3,7 +3,7 @@
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/FluentValidation.Tests.trx](#user-content-r0)|803 ✅||1 ⚪|4s|
+|fixtures/external/FluentValidation.Tests.trx|803 ✅||1 ⚪|4s|
 ## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/FluentValidation.Tests.trx</a>
 **804** tests were completed in **4s** with **803** passed, **0** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%206%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/golang-json.json](#user-content-r0)|5 ✅|6 ❌|1 ⚪|6s|
+|fixtures/golang-json.json|5 ✅|6 ❌|1 ⚪|6s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/golang-json.json</a>
 **12** tests were completed in **6s** with **5** passed, **6** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -3,7 +3,7 @@
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/jest-junit-eslint.xml](#user-content-r0)|1 ✅|||0ms|
+|fixtures/jest-junit-eslint.xml|1 ✅|||0ms|
 ## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/jest-junit-eslint.xml</a>
 **1** tests were completed in **0ms** with **1** passed, **0** failed and **0** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20passed%2C%204%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/jest-junit.xml](#user-content-r0)|1 ✅|4 ❌|1 ⚪|1s|
+|fixtures/jest-junit.xml|1 ✅|4 ❌|1 ⚪|1s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/jest-junit.xml</a>
 **6** tests were completed in **1s** with **1** passed, **4** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -3,7 +3,7 @@
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/jest/jest-react-component-test-results.xml](#user-content-r0)|1 ✅|||1000ms|
+|fixtures/external/jest/jest-react-component-test-results.xml|1 ✅|||1000ms|
 ## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/jest/jest-react-component-test-results.xml</a>
 **1** tests were completed in **1000ms** with **1** passed, **0** failed and **0** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-4207%20passed%2C%202%20failed%2C%2030%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/jest/jest-test-results.xml](#user-content-r0)|4207 ✅|2 ❌|30 ⚪|166s|
+|fixtures/external/jest/jest-test-results.xml|4207 ✅|2 ❌|30 ⚪|166s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/jest/jest-test-results.xml</a>
 **4239** tests were completed in **166s** with **4207** passed, **2** failed and **30** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,23 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -1,22 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
AssertionError: Expected value did not match.
❌ testCase6
ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20failed-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/junit-with-message.xml](#user-content-r0)||1 ❌||1ms|
+|fixtures/junit-with-message.xml||1 ❌||1ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/junit-with-message.xml</a>
 **1** tests were completed in **1ms** with **0** passed, **1** failed and **0** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20passed%2C%204%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/mocha-json.json](#user-content-r0)|1 ✅|4 ❌|1 ⚪|12ms|
+|fixtures/mocha-json.json|1 ✅|4 ❌|1 ⚪|12ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/mocha-json.json</a>
 **6** tests were completed in **12ms** with **1** passed, **4** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -3,7 +3,7 @@
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/mocha/mocha-test-results.json](#user-content-r0)|833 ✅||6 ⚪|6s|
+|fixtures/external/mocha/mocha-test-results.json|833 ✅||6 ⚪|6s|
 ## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/mocha/mocha-test-results.json</a>
 **839** tests were completed in **6s** with **833** passed, **0** failed and **6** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,30 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-8%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/junit-basic.xml](#user-content-r0)|8 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/junit-basic.xml</a>
**9** tests were completed in **16s** with **8** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Authentication.Login](#user-content-r0s1)|3 ✅|||4s|
|[Tests.Registration](#user-content-r0s2)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Authentication.Login</a>
```
✅ testCase4
✅ testCase5
✅ testCase6
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -1,88 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-28%20passed%2C%202%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/phpcheckstyle-phpunit.xml](#user-content-r0)|28 ✅|2 ❌||41ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/phpcheckstyle-phpunit.xml</a>
**30** tests were completed in **41ms** with **28** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CommentsTest](#user-content-r0s0)|3 ✅|||7ms|
|[DeprecationTest](#user-content-r0s1)|1 ✅|||1ms|
|[GoodTest](#user-content-r0s2)|4 ✅|||5ms|
|[IndentationTest](#user-content-r0s3)|8 ✅|||8ms|
|[MetricsTest](#user-content-r0s4)|1 ✅|||4ms|
|[NamingTest](#user-content-r0s5)|2 ✅|||3ms|
|[OptimizationTest](#user-content-r0s6)|1 ✅|||1ms|
|[OtherTest](#user-content-r0s7)|2 ✅|2 ❌||7ms|
|[PHPTagsTest](#user-content-r0s8)|2 ✅|||1ms|
|[ProhibitedTest](#user-content-r0s9)|1 ✅|||1ms|
|[StrictCompareTest](#user-content-r0s10)|1 ✅|||2ms|
|[UnusedTest](#user-content-r0s11)|2 ✅|||2ms|
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">CommentsTest</a>
```
✅ testGoodDoc
✅ testComments
✅ testTODOs
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">DeprecationTest</a>
```
✅ testDeprecations
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">GoodTest</a>
```
✅ testGood
✅ testDoWhile
✅ testAnonymousFunction
✅ testException
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">IndentationTest</a>
```
✅ testTabIndentation
✅ testSpaceIndentation
✅ testSpaceIndentationArray
✅ testGoodSpaceIndentationArray
✅ testGoodIndentationNewLine
✅ testGoodIndentationSpaces
✅ testBadSpaces
✅ testBadSpaceAfterControl
```
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">MetricsTest</a>
```
✅ testMetrics
```
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">NamingTest</a>
```
✅ testNaming
✅ testFunctionNaming
```
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">OptimizationTest</a>
```
✅ testTextAfterClosingTag
```
### ❌ <a id="user-content-r0s7" href="#user-content-r0s7">OtherTest</a>
```
❌ testOther
PHPUnit\Framework\ExpectationFailedException
❌ testException
PHPUnit\Framework\ExpectationFailedException
✅ testEmpty
✅ testSwitchCaseNeedBreak
```
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">PHPTagsTest</a>
```
✅ testTextAfterClosingTag
✅ testClosingTagNotNeeded
```
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">ProhibitedTest</a>
```
✅ testProhibited
```
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">StrictCompareTest</a>
```
✅ testStrictCompare
```
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">UnusedTest</a>
```
✅ testGoodUnused
✅ testBadUnused
```

View File

@@ -1,41 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-10%20passed%2C%202%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/phpunit/phpunit.xml](#user-content-r0)|10 ✅|2 ❌||148ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/phpunit/phpunit.xml</a>
**12** tests were completed in **148ms** with **10** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CLI Arguments](#user-content-r0s0)||2 ❌||140ms|
|[PHPUnit\Event\CollectingDispatcherTest](#user-content-r0s1)|2 ✅|||4ms|
|[PHPUnit\Event\DeferringDispatcherTest](#user-content-r0s2)|4 ✅|||3ms|
|[PHPUnit\Event\DirectDispatcherTest](#user-content-r0s3)|4 ✅|||1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">CLI Arguments</a>
```
❌ targeting-traits-with-coversclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
❌ targeting-traits-with-usesclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">PHPUnit\Event\CollectingDispatcherTest</a>
```
PHPUnit.Event.CollectingDispatcherTest
✅ testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation
✅ testCollectsDispatchedEventsUntilFlushed
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">PHPUnit\Event\DeferringDispatcherTest</a>
```
PHPUnit.Event.DeferringDispatcherTest
✅ testCollectsEventsUntilFlush
✅ testFlushesCollectedEvents
✅ testSubscriberCanBeRegistered
✅ testTracerCanBeRegistered
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">PHPUnit\Event\DirectDispatcherTest</a>
```
PHPUnit.Event.DirectDispatcherTest
✅ testDispatchesEventToKnownSubscribers
✅ testDispatchesEventToTracers
✅ testRegisterRejectsUnknownSubscriber
✅ testDispatchRejectsUnknownEventType
```

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-268%20passed%2C%201%20failed-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/flutter/provider-test-results.json](#user-content-r0)|268 ✅|1 ❌||0ms|
+|fixtures/external/flutter/provider-test-results.json|268 ✅|1 ❌||0ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/flutter/provider-test-results.json</a>
 **269** tests were completed in **0ms** with **268** passed, **1** failed and **0** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml](#user-content-r0)||1 ❌|1 ⚪|116ms|
+|fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml||1 ❌|1 ⚪|116ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml</a>
 **2** tests were completed in **116ms** with **0** passed, **1** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-793%20passed%2C%201%20failed%2C%2014%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/java/pulsar-test-report.xml](#user-content-r0)|793 ✅|1 ❌|14 ⚪|2127s|
+|fixtures/external/java/pulsar-test-report.xml|793 ✅|1 ❌|14 ⚪|2127s|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/pulsar-test-report.xml</a>
 **808** tests were completed in **2127s** with **793** passed, **1** failed and **14** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,26 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```

View File

@@ -1,23 +0,0 @@
![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
AssertionError: failed
❌ test_error
Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-1%20passed%2C%201%20failed%2C%201%20skipped-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/rspec-json.json](#user-content-r0)|1 ✅|1 ❌|1 ⚪|0ms|
+|fixtures/rspec-json.json|1 ✅|1 ❌|1 ⚪|0ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/rspec-json.json</a>
 **3** tests were completed in **0ms** with **1** passed, **1** failed and **1** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -3,7 +3,7 @@
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/external/SilentNotes.trx](#user-content-r0)|67 ✅||12 ⚪|1s|
+|fixtures/external/SilentNotes.trx|67 ✅||12 ⚪|1s|
 ## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/SilentNotes.trx</a>
 **79** tests were completed in **1s** with **67** passed, **0** failed and **12** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,7 +1,7 @@
 ![Tests failed](https://img.shields.io/badge/tests-2%20passed%2C%201%20failed-critical)
 |Report|Passed|Failed|Skipped|Time|
 |:---|---:|---:|---:|---:|
-|[fixtures/swift-xunit.xml](#user-content-r0)|2 ✅|1 ❌||220ms|
+|fixtures/swift-xunit.xml|2 ✅|1 ❌||220ms|
 ## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/swift-xunit.xml</a>
 **3** tests were completed in **220ms** with **2** passed, **1** failed and **0** skipped.
 |Test suite|Passed|Failed|Skipped|Time|

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`dart-json tests matches report snapshot 1`] = `
 TestRunResult {

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`dotnet-nunit tests report from ./reports/dotnet test results matches snapshot 1`] = `
 TestRunResult {

View File

@@ -1,6 +1,6 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
-exports[`dotnet-trx tests matches dotnet-trx report snapshot 1`] = `
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+exports[`dotnet-trx tests matches report snapshot 1`] = `
 TestRunResult {
 "path": "fixtures/dotnet-trx.trx",
 "suites": [
@@ -21,9 +21,7 @@ TestRunResult {
 at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
 at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
 "line": 9,
-"message": "System.DivideByZeroException : Attempted to divide by zero.
-at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
-at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
+"message": "System.DivideByZeroException : Attempted to divide by zero.",
 "path": "DotnetTests.Unit/Calculator.cs",
 },
 "name": "Exception_In_TargetTest",
@@ -35,8 +33,7 @@ TestRunResult {
 "details": "System.Exception : Test
 at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
 "line": 39,
-"message": "System.Exception : Test
-at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
+"message": "System.Exception : Test",
 "path": "DotnetTests.XUnitTests/CalculatorTests.cs",
 },
 "name": "Exception_In_Test",
@@ -52,8 +49,7 @@ Actual: 2
 "line": 27,
 "message": "Assert.Equal() Failure
 Expected: 3
-Actual: 2
-at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
+Actual: 2",
 "path": "DotnetTests.XUnitTests/CalculatorTests.cs",
 },
 "name": "Failing_Test",
@@ -75,8 +71,7 @@ Actual: False
 "line": 59,
 "message": "Assert.True() Failure
 Expected: True
-Actual: False
-at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
+Actual: False",
 "path": "DotnetTests.XUnitTests/CalculatorTests.cs",
 },
 "name": "Is_Even_Number(i: 3)",
@@ -104,213 +99,7 @@ Actual: False
 "line": 67,
 "message": "Assert.True() Failure
 Expected: True
-Actual: False
-at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
+Actual: False",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
},
"name": "Should be even number(i: 3)",
"result": "failed",
"time": 0.6537000000000001,
},
TestCaseResult {
"error": undefined,
"name": "Skipped_Test",
"result": "skipped",
"time": 1,
},
TestCaseResult {
"error": undefined,
"name": "Timeout_Test",
"result": "success",
"time": 108.42580000000001,
},
],
},
],
"name": "DotnetTests.XUnitTests.CalculatorTests",
"totalTime": undefined,
},
],
"totalTime": 1116,
}
`;
exports[`dotnet-trx tests matches dotnet-xunitv3 report snapshot 1`] = `
TestRunResult {
"path": "fixtures/dotnet-xunitv3.trx",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": null,
"tests": [
TestCaseResult {
"error": {
"details": "Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
"line": 25,
"message": "Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
"path": "DotnetTests.XUnitV3Tests/FixtureTests.cs",
},
"name": "Failing_Test",
"result": "failed",
"time": 17.0545,
},
TestCaseResult {
"error": undefined,
"name": "Passing_Test",
"result": "success",
"time": 0.8786,
},
],
},
],
"name": "DotnetTests.XUnitV3Tests.FixtureTests",
"totalTime": undefined,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": null,
"tests": [
TestCaseResult {
"error": undefined,
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]",
"result": "failed",
"time": 0,
},
],
},
],
"name": "Unclassified",
"totalTime": undefined,
},
],
"totalTime": 267,
}
`;
exports[`dotnet-trx tests matches report snapshot (only failed tests) 1`] = `
TestRunResult {
"path": "fixtures/dotnet-trx.trx",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": null,
"tests": [
TestCaseResult {
"error": undefined,
"name": "Custom Name",
"result": "success",
"time": 0.1371,
},
TestCaseResult {
"error": {
"details": "System.DivideByZeroException : Attempted to divide by zero.
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
"line": 9,
"message": "System.DivideByZeroException : Attempted to divide by zero.
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
"path": "DotnetTests.Unit/Calculator.cs",
},
"name": "Exception_In_TargetTest",
"result": "failed",
"time": 0.8377,
},
TestCaseResult {
"error": {
"details": "System.Exception : Test
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
"line": 39,
"message": "System.Exception : Test
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
},
"name": "Exception_In_Test",
"result": "failed",
"time": 2.5175,
},
TestCaseResult {
"error": {
"details": "Assert.Equal() Failure
Expected: 3
Actual: 2
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
"line": 27,
"message": "Assert.Equal() Failure
Expected: 3
Actual: 2
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
},
"name": "Failing_Test",
"result": "failed",
"time": 3.8697,
},
TestCaseResult {
"error": undefined,
"name": "Is_Even_Number(i: 2)",
"result": "success",
"time": 0.0078,
},
TestCaseResult {
"error": {
"details": "Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
"line": 59,
"message": "Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
},
"name": "Is_Even_Number(i: 3)",
"result": "failed",
"time": 0.41409999999999997,
},
TestCaseResult {
"error": undefined,
"name": "Passing_Test",
"result": "success",
"time": 0.1365,
},
TestCaseResult {
"error": undefined,
"name": "Should be even number(i: 2)",
"result": "success",
"time": 0.0097,
},
TestCaseResult {
"error": {
"details": "Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
"line": 67,
"message": "Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
"path": "DotnetTests.XUnitTests/CalculatorTests.cs", "path": "DotnetTests.XUnitTests/CalculatorTests.cs",
}, },
"name": "Should be even number(i: 3)", "name": "Should be even number(i: 3)",

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`golang-json tests report from ./reports/dotnet test results matches snapshot 1`] = `
 TestRunResult {

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
 TestRunResult {
@@ -6878,153 +6878,3 @@ at java.lang.Thread.run(Thread.java:748)
 "totalTime": 2126531.0000000005,
 }
 `;
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-complete.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2436,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1534,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 822,
},
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Expected value did not match.",
"path": undefined,
},
"name": "testCase5",
"result": "failed",
"time": 2902.412,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "ArithmeticError: Division by zero.",
"path": undefined,
},
"name": "testCase6",
"result": "failed",
"time": 3819,
},
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2944,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1625.275,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 16082.687,
},
],
"totalTime": 16082.687,
}
`;

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`jest-junit tests parsing ESLint report without timing information works - PR #134 1`] = `
 TestRunResult {

View File

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
+// Jest Snapshot v1, https://goo.gl/fbAQLP
 exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
 TestRunResult {

View File

@@ -1,628 +0,0 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`phpunit-junit tests report from junit-basic.xml matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/phpunit/junit-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "success",
"time": 2244,
},
TestCaseResult {
"error": undefined,
"name": "testCase5",
"result": "success",
"time": 781,
},
TestCaseResult {
"error": undefined,
"name": "testCase6",
"result": "success",
"time": 1331,
},
],
},
],
"name": "Tests.Authentication.Login",
"totalTime": 4356,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": "",
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`phpunit-junit tests report from phpcheckstyle-phpunit.xml matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/phpunit/phpcheckstyle-phpunit.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGoodDoc",
"result": "success",
"time": 5.093,
},
TestCaseResult {
"error": undefined,
"name": "testComments",
"result": "success",
"time": 0.921,
},
TestCaseResult {
"error": undefined,
"name": "testTODOs",
"result": "success",
"time": 0.6880000000000001,
},
],
},
],
"name": "CommentsTest",
"totalTime": 6.702,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testDeprecations",
"result": "success",
"time": 0.9740000000000001,
},
],
},
],
"name": "DeprecationTest",
"totalTime": 0.9740000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGood",
"result": "success",
"time": 2.6470000000000002,
},
TestCaseResult {
"error": undefined,
"name": "testDoWhile",
"result": "success",
"time": 1.0219999999999998,
},
TestCaseResult {
"error": undefined,
"name": "testAnonymousFunction",
"result": "success",
"time": 0.8,
},
TestCaseResult {
"error": undefined,
"name": "testException",
"result": "success",
"time": 0.888,
},
],
},
],
"name": "GoodTest",
"totalTime": 5.357,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTabIndentation",
"result": "success",
"time": 0.857,
},
TestCaseResult {
"error": undefined,
"name": "testSpaceIndentation",
"result": "success",
"time": 0.929,
},
TestCaseResult {
"error": undefined,
"name": "testSpaceIndentationArray",
"result": "success",
"time": 0.975,
},
TestCaseResult {
"error": undefined,
"name": "testGoodSpaceIndentationArray",
"result": "success",
"time": 1.212,
},
TestCaseResult {
"error": undefined,
"name": "testGoodIndentationNewLine",
"result": "success",
"time": 0.859,
},
TestCaseResult {
"error": undefined,
"name": "testGoodIndentationSpaces",
"result": "success",
"time": 0.78,
},
TestCaseResult {
"error": undefined,
"name": "testBadSpaces",
"result": "success",
"time": 1.1199999999999999,
},
TestCaseResult {
"error": undefined,
"name": "testBadSpaceAfterControl",
"result": "success",
"time": 0.9219999999999999,
},
],
},
],
"name": "IndentationTest",
"totalTime": 7.654,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testMetrics",
"result": "success",
"time": 4.146999999999999,
},
],
},
],
"name": "MetricsTest",
"totalTime": 4.146999999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testNaming",
"result": "success",
"time": 1.426,
},
TestCaseResult {
"error": undefined,
"name": "testFunctionNaming",
"result": "success",
"time": 1.271,
},
],
},
],
"name": "NamingTest",
"totalTime": 2.697,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTextAfterClosingTag",
"result": "success",
"time": 0.9940000000000001,
},
],
},
],
"name": "OptimizationTest",
"totalTime": 0.9940000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": {
"details": "OtherTest::testOther
We expect 20 warnings
Failed asserting that 19 matches expected 20.
/workspace/phpcheckstyle/test/OtherTest.php:24",
"line": 12,
"message": "PHPUnit\\Framework\\ExpectationFailedException",
"path": undefined,
},
"name": "testOther",
"result": "failed",
"time": 5.2509999999999994,
},
TestCaseResult {
"error": {
"details": "OtherTest::testException
We expect 1 error
Failed asserting that 0 matches expected 1.
/workspace/phpcheckstyle/test/OtherTest.php:40",
"line": 31,
"message": "PHPUnit\\Framework\\ExpectationFailedException",
"path": undefined,
},
"name": "testException",
"result": "failed",
"time": 0.751,
},
TestCaseResult {
"error": undefined,
"name": "testEmpty",
"result": "success",
"time": 0.42700000000000005,
},
TestCaseResult {
"error": undefined,
"name": "testSwitchCaseNeedBreak",
"result": "success",
"time": 0.901,
},
],
},
],
"name": "OtherTest",
"totalTime": 7.329,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTextAfterClosingTag",
"result": "success",
"time": 0.641,
},
TestCaseResult {
"error": undefined,
"name": "testClosingTagNotNeeded",
"result": "success",
"time": 0.631,
},
],
},
],
"name": "PHPTagsTest",
"totalTime": 1.272,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testProhibited",
"result": "success",
"time": 0.9380000000000001,
},
],
},
],
"name": "ProhibitedTest",
"totalTime": 0.9380000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testStrictCompare",
"result": "success",
"time": 1.578,
},
],
},
],
"name": "StrictCompareTest",
"totalTime": 1.578,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGoodUnused",
"result": "success",
"time": 0.94,
},
TestCaseResult {
"error": undefined,
"name": "testBadUnused",
"result": "success",
"time": 0.895,
},
],
},
],
"name": "UnusedTest",
"totalTime": 1.835,
},
],
"totalTime": undefined,
}
`;
exports[`phpunit-junit tests report from phpunit test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/phpunit/phpunit.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.CollectingDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation",
"result": "success",
"time": 1.441,
},
TestCaseResult {
"error": undefined,
"name": "testCollectsDispatchedEventsUntilFlushed",
"result": "success",
"time": 2.815,
},
],
},
],
"name": "PHPUnit\\Event\\CollectingDispatcherTest",
"totalTime": 4.256,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.DeferringDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCollectsEventsUntilFlush",
"result": "success",
"time": 1.6720000000000002,
},
TestCaseResult {
"error": undefined,
"name": "testFlushesCollectedEvents",
"result": "success",
"time": 0.661,
},
TestCaseResult {
"error": undefined,
"name": "testSubscriberCanBeRegistered",
"result": "success",
"time": 0.33399999999999996,
},
TestCaseResult {
"error": undefined,
"name": "testTracerCanBeRegistered",
"result": "success",
"time": 0.262,
},
],
},
],
"name": "PHPUnit\\Event\\DeferringDispatcherTest",
"totalTime": 2.928,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.DirectDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testDispatchesEventToKnownSubscribers",
"result": "success",
"time": 0.17,
},
TestCaseResult {
"error": undefined,
"name": "testDispatchesEventToTracers",
"result": "success",
"time": 0.248,
},
TestCaseResult {
"error": undefined,
"name": "testRegisterRejectsUnknownSubscriber",
"result": "success",
"time": 0.257,
},
TestCaseResult {
"error": undefined,
"name": "testDispatchRejectsUnknownEventType",
"result": "success",
"time": 0.11900000000000001,
},
],
},
],
"name": "PHPUnit\\Event\\DirectDispatcherTest",
"totalTime": 0.794,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": {
"details": "targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
"line": undefined,
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
"path": undefined,
},
"name": "targeting-traits-with-coversclass-attribute-is-deprecated.phpt",
"result": "failed",
"time": 68.151,
},
TestCaseResult {
"error": {
"details": "targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
"line": undefined,
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
"path": undefined,
},
"name": "targeting-traits-with-usesclass-attribute-is-deprecated.phpt",
"result": "failed",
"time": 64.268,
},
],
},
],
"name": "CLI Arguments",
"totalTime": 140.397,
},
],
"totalTime": undefined,
}
`;

View File

@@ -1,192 +0,0 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-pytest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "tests.test_lib",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 5,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param1]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param2]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_always_fail():
> assert False
E assert False
tests/test_lib.py:25: AssertionError
",
"line": undefined,
"message": "assert False",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_error():
> raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_record_property",
"result": "success",
"time": 0,
},
],
},
TestGroupResult {
"name": "custom_classname",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_with_record_xml_attribute",
"result": "success",
"time": 0,
},
],
},
],
"name": "pytest",
"totalTime": 19,
},
],
"totalTime": undefined,
}
`;
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-unittest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "TestAcme",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_0_param1",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_1_param2",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
",
"line": undefined,
"message": "AssertionError: failed",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
],
},
],
"name": "TestAcme-20251114214921",
"totalTime": 1,
},
],
"totalTime": 1,
}
`;

View File

@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = ` exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = `
TestRunResult { TestRunResult {

View File

@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`swift-xunit tests report from swift test results matches snapshot 1`] = ` exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
TestRunResult { TestRunResult {

View File

@@ -3,7 +3,7 @@ import * as path from 'path'
import {DotnetTrxParser} from '../src/parsers/dotnet-trx/dotnet-trx-parser' import {DotnetTrxParser} from '../src/parsers/dotnet-trx/dotnet-trx-parser'
import {ParseOptions} from '../src/test-parser' import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport, ReportOptions} from '../src/report/get-report' import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils' import {normalizeFilePath} from '../src/utils/path-utils'
describe('dotnet-trx tests', () => { describe('dotnet-trx tests', () => {
@@ -39,34 +39,9 @@ describe('dotnet-trx tests', () => {
expect(result.result).toBe('success') expect(result.result).toBe('success')
}) })
it.each([['dotnet-trx'], ['dotnet-xunitv3']])('matches %s report snapshot', async reportName => { it('matches report snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', `${reportName}.trx`)
const outputPath = path.join(__dirname, '__outputs__', `${reportName}.md`)
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: [
'DotnetTests.Unit/Calculator.cs',
'DotnetTests.XUnitTests/CalculatorTests.cs',
'DotnetTests.XUnitV3Tests/FixtureTests.cs'
]
//workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
}
const parser = new DotnetTrxParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('matches report snapshot (only failed tests)', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx') const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx-only-failed.md') const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath)) const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'}) const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
@@ -80,12 +55,7 @@ describe('dotnet-trx tests', () => {
const result = await parser.parse(filePath, fileContent) const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot() expect(result).toMatchSnapshot()
const reportOptions: ReportOptions = { const report = getReport([result])
...DEFAULT_OPTIONS,
listSuites: 'all',
listTests: 'failed'
}
const report = getReport([result], reportOptions)
fs.mkdirSync(path.dirname(outputPath), {recursive: true}) fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report) fs.writeFileSync(outputPath, report)
}) })
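
Note: the refactor above folds the single TRX snapshot test into Jest's it.each, so the classic dotnet-trx fixture and the new dotnet-xunitv3 fixture run through the same parse-and-report body. A minimal, self-contained sketch of the it.each pattern itself (the add function is illustrative, not from this repo):

// Table-driven Jest test: each row of the table becomes its own
// reported test case, with %i placeholders filled from the row.
const add = (a: number, b: number): number => a + b

it.each([
  [1, 2, 3],
  [2, 3, 5]
])('add(%i, %i) returns %i', (a, b, expected) => {
  expect(add(a, b)).toBe(expected)
})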

View File

@@ -1,60 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<TestRun id="54e29175-539e-48a3-a634-3a1855a0ed38" name="@Asterix 2025-06-22 14:17:12.022" xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010">
<Times creation="2025-06-22T14:17:11.756535Z" queuing="2025-06-22T14:17:11.756535Z" start="2025-06-22T14:17:11.756535Z" finish="2025-06-22T14:17:12.023063Z" />
<TestSettings name="default" id="932e6c6f-3e5b-4392-ad65-e04c1ef476b5">
<Deployment runDeploymentRoot="_Asterix_2025-06-22_14_17_12.022" />
</TestSettings>
<Results>
<UnitTestResult executionId="37242a1f-ca3e-44b3-8142-71e510480975" testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" testName="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" computerName="Asterix" duration="00:00:00.0170545" startTime="2025-06-22T14:17:11.9339840+00:00" endTime="2025-06-22T14:17:11.9750850+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="37242a1f-ca3e-44b3-8142-71e510480975">
<Output>
<ErrorInfo>
<Message>Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }</Message>
<StackTrace> at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)</StackTrace>
</ErrorInfo>
</Output>
</UnitTestResult>
<UnitTestResult executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testId="3ee930dd-8a75-92a0-0d90-373833166db1" testName="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" computerName="Asterix" duration="00:00:00.0008786" startTime="2025-06-22T14:17:11.9819890+00:00" endTime="2025-06-22T14:17:11.9833560+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Passed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
<UnitTestResult executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testId="372fb60f-1f5b-a52e-032e-41a7556021e8" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320280+00:00" endTime="2025-06-22T14:17:12.0320290+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
<UnitTestResult executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320420+00:00" endTime="2025-06-22T14:17:12.0320430+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
</Results>
<TestDefinitions>
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="f846a1e6-0b68-2ac6-9a66-f417926e3238">
<Execution id="37242a1f-ca3e-44b3-8142-71e510480975" />
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" />
</UnitTest>
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="3ee930dd-8a75-92a0-0d90-373833166db1">
<Execution id="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" />
</UnitTest>
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="372fb60f-1f5b-a52e-032e-41a7556021e8">
<Execution id="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" />
</UnitTest>
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="a69083a1-56b4-3da3-2d7c-66fda374fd8e">
<Execution id="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" />
</UnitTest>
</TestDefinitions>
<TestEntries>
<TestEntry testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" executionId="37242a1f-ca3e-44b3-8142-71e510480975" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
<TestEntry testId="3ee930dd-8a75-92a0-0d90-373833166db1" executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
<TestEntry testId="372fb60f-1f5b-a52e-032e-41a7556021e8" executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
<TestEntry testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
</TestEntries>
<TestLists>
<TestList name="Results Not in a List" id="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
<TestList name="All Loaded Results" id="19431567-8539-422a-85d7-44ee4e166bda" />
</TestLists>
<ResultSummary outcome="Failed">
<Counters total="4" executed="4" passed="1" failed="3" error="0" timeout="0" aborted="0" inconclusive="0" passedButRunAborted="0" notRunnable="0" notExecuted="0" disconnected="0" warning="0" completed="0" inProgress="0" pending="0" />
<RunInfos>
<RunInfo computerName="Asterix" outcome="Error" timestamp="2025-06-22T14:17:12.033401">
<Text>Exit code indicates failure: '2'. Please refer to https://aka.ms/testingplatform/exitcodes for more information.</Text>
</RunInfo>
</RunInfos>
</ResultSummary>
</TestRun>

View File

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites/>

View File

@@ -1,31 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the basic structure.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
<!-- Java JUni4 XML files does not nest <testsuite> elements -->
<!--
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
-->
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -1,141 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a JUnit-style XML example with commonly used tags and attributes.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
the <testsuites> element if there is only a single top-level <testsuite> element (which
is then used as the root element).
name Name of the entire test run
tests Total number of tests in this file
failures Total number of failed tests in this file
errors Total number of errored tests in this file
skipped Total number of skipped tests in this file
assertions Total number of assertions for all tests in this file
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test run was executed (in ISO 8601 format)
-->
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
There can be many test suites in an XML file, and there can be test suites under other
test suites.
name Name of the test suite (e.g. class name or folder name)
tests Total number of tests in this suite
failures Total number of failed tests in this suite
errors Total number of errored tests in this suite
skipped Total number of skipped tests in this suite
assertions Total number of assertions for all tests in this suite
time Aggregated time of all tests in this suite in seconds
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
file Source code file of this test suite
-->
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
file="tests/registration.code">
<!-- <properties> Test suites (and test cases, see below) can have additional
properties such as environment variables or version numbers. -->
<properties>
<!-- <property> Each property has a name and value. Some tools also support
properties with text values instead of value attributes. -->
<property name="version" value="1.774" />
<property name="commit" value="ef7bebf" />
<property name="browser" value="Google Chrome" />
<property name="ci" value="https://github.com/actions/runs/1234" />
<property name="config">
Config line #1
Config line #2
Config line #3
</property>
</properties>
<!-- <system-out> Optionally data written to standard out for the suite.
Also supported on a test case level, see below. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optionally data written to standard error for the suite.
Also supported on a test case level, see below. -->
<system-err>Data written to standard error.</system-err>
<!-- <testcase> There are one or more test cases in a test suite. A test passed
if there isn't an additional result element (skipped, failure, error).
name The name of this test case, often the method name
classname The name of the parent class/folder, often the same as the suite's name
assertions Number of assertions checked during test case execution
time Execution time of the test in seconds
file Source code file of this test case
line Source code line number of the start of this test case
-->
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
time="2.436" file="tests/registration.code" line="24" />
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
time="1.534" file="tests/registration.code" line="62" />
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
time="0.822" file="tests/registration.code" line="102" />
<!-- Example of a test case that was skipped -->
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
time="0" file="tests/registration.code" line="164">
<!-- <skipped> Indicates that the test was not executed. Can have an optional
message describing why the test was skipped. -->
<skipped message="Test was skipped." />
</testcase>
<!-- Example of a test case that failed. -->
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
time="2.902412" file="tests/registration.code" line="202">
<!-- <failure> The test failed because one of the assertions/checks failed.
Can have a message and failure type, often the assertion type or class. The text
content of the element often includes the failure description or stack trace. -->
<failure message="Expected value did not match." type="AssertionError">
<!-- Failure description or stack trace -->
</failure>
</testcase>
<!-- Example of a test case that had errors. -->
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
time="3.819" file="tests/registration.code" line="235">
<!-- <error> The test had an unexpected error during execution. Can have a
message and error type, often the exception type or class. The text
content of the element often includes the error description or stack trace. -->
<error message="Division by zero." type="ArithmeticError">
<!-- Error description or stack trace -->
</error>
</testcase>
<!-- Example of a test case with outputs. -->
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
time="2.944" file="tests/registration.code" line="287">
<!-- <system-out> Optional data written to standard out for the test case. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optional data written to standard error for the test case. -->
<system-err>Data written to standard error.</system-err>
</testcase>
<!-- Example of a test case with properties -->
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
time="1.625275" file="tests/registration.code" line="302">
<!-- <properties> Some tools also support properties for test cases. -->
<properties>
<property name="priority" value="high" />
<property name="language" value="english" />
<property name="author" value="Adrian" />
<property name="attachment" value="screenshots/dashboard.png" />
<property name="attachment" value="screenshots/users.png" />
<property name="description">
This text describes the purpose of this test case and provides
an overview of what the test does and how it works.
</property>
</properties>
</testcase>
</testsuite>
</testsuites>

View File

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the basic structure.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -1,212 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="/workspace/phpcheckstyle/phpunit.xml" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
<testsuite name="PHPUnitTestSuite" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
<testsuite name="CommentsTest" file="/workspace/phpcheckstyle/test/CommentsTest.php" tests="3" assertions="12" errors="0" failures="0" skipped="0" time="0.006702">
<testcase name="testGoodDoc" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="12" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.005093"/>
<testcase name="testComments" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="30" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000921">
<system-out>File "./test/sample/bad_comments.php" warning, line 4 - Avoid Shell/Perl like comments.
File "./test/sample/bad_comments.php" warning, line 6 - The class Comments must have a docblock comment.
File "./test/sample/bad_comments.php" warning, line 10 - The function testComment must have a docblock comment.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment returns a value and must include @returns in its docblock.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment parameters must match those in its docblock @param.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment throws an exception and must include @throws in its docblock.
</system-out>
</testcase>
<testcase name="testTODOs" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="48" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000688">
<system-out>File "./test/sample/todo.php" warning, line 3 - TODO: The todo message.
</system-out>
</testcase>
</testsuite>
<testsuite name="DeprecationTest" file="/workspace/phpcheckstyle/test/DeprecationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000974">
<testcase name="testDeprecations" file="/workspace/phpcheckstyle/test/DeprecationTest.php" line="12" class="DeprecationTest" classname="DeprecationTest" assertions="4" time="0.000974">
<system-out>File "./test/sample/bad_deprecation.php" warning, line 17 - split is deprecated since PHP 5.3. explode($pattern, $string) or preg_split('@'.$pattern.'@', $string) must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 19 - ereg is deprecated since PHP 5.3. preg_match('@'.$pattern.'@', $string) must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 21 - session_register is deprecated since PHP 5.3. $_SESSION must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 23 - mysql_db_query is deprecated since PHP 5.3. mysql_select_db and mysql_query must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 25 - $HTTP_GET_VARS is deprecated since PHP 5.3. $_GET must be used instead.
</system-out>
</testcase>
</testsuite>
<testsuite name="GoodTest" file="/workspace/phpcheckstyle/test/GoodTest.php" tests="4" assertions="16" errors="0" failures="0" skipped="0" time="0.005357">
<testcase name="testGood" file="/workspace/phpcheckstyle/test/GoodTest.php" line="12" class="GoodTest" classname="GoodTest" assertions="4" time="0.002647"/>
<testcase name="testDoWhile" file="/workspace/phpcheckstyle/test/GoodTest.php" line="32" class="GoodTest" classname="GoodTest" assertions="4" time="0.001022"/>
<testcase name="testAnonymousFunction" file="/workspace/phpcheckstyle/test/GoodTest.php" line="50" class="GoodTest" classname="GoodTest" assertions="4" time="0.000800"/>
<testcase name="testException" file="/workspace/phpcheckstyle/test/GoodTest.php" line="68" class="GoodTest" classname="GoodTest" assertions="4" time="0.000888"/>
</testsuite>
<testsuite name="IndentationTest" file="/workspace/phpcheckstyle/test/IndentationTest.php" tests="8" assertions="32" errors="0" failures="0" skipped="0" time="0.007654">
<testcase name="testTabIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="12" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000857">
<system-out>File "./test/sample/bad_indentation.php" warning, line 8 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 15 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 17 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 18 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 19 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 20 - Whitespace indentation must not be used.
</system-out>
</testcase>
<testcase name="testSpaceIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="30" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000929">
<system-out>File "./test/sample/bad_indentation.php" warning, line 10 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 10 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation.php" warning, line 13 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 13 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation.php" warning, line 15 - The indentation level must be 8 but was 4.
File "./test/sample/bad_indentation.php" warning, line 16 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 16 - The indentation level must be 8 but was 1.
File "./test/sample/bad_indentation.php" warning, line 17 - The indentation level must be 8 but was 3.
File "./test/sample/bad_indentation.php" warning, line 18 - The indentation level must be 8 but was 5.
File "./test/sample/bad_indentation.php" warning, line 19 - The indentation level must be 8 but was 6.
File "./test/sample/bad_indentation.php" warning, line 20 - The indentation level must be 4 but was 1.
</system-out>
</testcase>
<testcase name="testSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="51" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000975">
<system-out>File "./test/sample/bad_indentation_array.php" warning, line 10 - Tab indentation must not be used.
File "./test/sample/bad_indentation_array.php" warning, line 10 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation_array.php" warning, line 13 - Tab indentation must not be used.
File "./test/sample/bad_indentation_array.php" warning, line 13 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation_array.php" warning, line 16 - The indentation level must be 12 but was 8.
File "./test/sample/bad_indentation_array.php" warning, line 24 - The indentation level must be 12 but was 8.
File "./test/sample/bad_indentation_array.php" warning, line 29 - The indentation level must be 8 but was 12.
File "./test/sample/bad_indentation_array.php" warning, line 15 - Undeclared or unused variable: $aVar.
File "./test/sample/bad_indentation_array.php" warning, line 19 - Undeclared or unused variable: $bVar.
File "./test/sample/bad_indentation_array.php" warning, line 23 - Undeclared or unused variable: $cVar.
File "./test/sample/bad_indentation_array.php" warning, line 27 - Undeclared or unused variable: $dVar.
</system-out>
</testcase>
<testcase name="testGoodSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="72" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001212"/>
<testcase name="testGoodIndentationNewLine" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="93" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000859"/>
<testcase name="testGoodIndentationSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="116" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000780"/>
<testcase name="testBadSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="137" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001120">
<system-out>File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must follow ,.
File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must precede {.
File "./test/sample/bad_spaces.php" warning, line 19 - Whitespace must follow if.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede =.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow =.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede +.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow +.
File "./test/sample/bad_spaces.php" info, line 25 - Whitespace must not precede ,.
File "./test/sample/bad_spaces.php" info, line 26 - Whitespace must not follow !.
</system-out>
</testcase>
<testcase name="testBadSpaceAfterControl" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="155" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000922">
<system-out>File "./test/sample/bad_space_after_control.php" warning, line 19 - Whitespace must not follow if.
</system-out>
</testcase>
</testsuite>
<testsuite name="MetricsTest" file="/workspace/phpcheckstyle/test/MetricsTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.004147">
<testcase name="testMetrics" file="/workspace/phpcheckstyle/test/MetricsTest.php" line="12" class="MetricsTest" classname="MetricsTest" assertions="4" time="0.004147">
<system-out>File "./test/sample/bad_metrics.php" warning, line 21 - The function testMetrics's number of parameters (6) must not exceed 4.
File "./test/sample/bad_metrics.php" info, line 55 - Line is too long. [233/160]
File "./test/sample/bad_metrics.php" warning, line 21 - The Cyclomatic Complexity of function testMetrics is too high. [15/10]
File "./test/sample/bad_metrics.php" warning, line 244 - The testMetrics function body length is too long. [223/200]
</system-out>
</testcase>
</testsuite>
<testsuite name="NamingTest" file="/workspace/phpcheckstyle/test/NamingTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.002697">
<testcase name="testNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="12" class="NamingTest" classname="NamingTest" assertions="4" time="0.001426">
<system-out>File "./test/sample/_bad_naming.php" error, line 11 - Constant _badly_named_constant name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" error, line 13 - Constant bad_CONST name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" warning, line 17 - Top level variable $XXX name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 20 - Variable x name length is too short.
File "./test/sample/_bad_naming.php" error, line 28 - Class badlynamedclass name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" warning, line 32 - Member variable $YYY name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 37 - The constructor name must be __construct().
File "./test/sample/_bad_naming.php" error, line 44 - Function Badlynamedfunction name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 47 - Local variable $ZZZ name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 54 - Protected function Badlynamedfunction2 name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 61 - Private function badlynamedfunction3 name should follow the pattern /^_[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 70 - Interface _badlynamedinterface name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" error, line 75 - File _bad_naming.php name should follow the pattern /^[a-zA-Z][a-zA-Z0-9._]*$/.
</system-out>
</testcase>
<testcase name="testFunctionNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="32" class="NamingTest" classname="NamingTest" assertions="4" time="0.001271"/>
</testsuite>
<testsuite name="OptimizationTest" file="/workspace/phpcheckstyle/test/OptimizationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000994">
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/OptimizationTest.php" line="12" class="OptimizationTest" classname="OptimizationTest" assertions="4" time="0.000994">
<system-out>File "./test/sample/bad_optimisation.php" warning, line 18 - count function must not be used inside a loop.
File "./test/sample/bad_optimisation.php" warning, line 23 - count function must not be used inside a loop.
</system-out>
</testcase>
</testsuite>
<testsuite name="OtherTest" file="/workspace/phpcheckstyle/test/OtherTest.php" tests="4" assertions="13" errors="0" failures="2" skipped="0" time="0.007329">
<testcase name="testOther" file="/workspace/phpcheckstyle/test/OtherTest.php" line="12" class="OtherTest" classname="OtherTest" assertions="4" time="0.005251">
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testOther
We expect 20 warnings
Failed asserting that 19 matches expected 20.
/workspace/phpcheckstyle/test/OtherTest.php:24</failure>
<system-out>File "./test/sample/bad_other.php" warning, line 17 - All arguments with default values must be at the end of the block or statement.
File "./test/sample/bad_other.php" warning, line 21 - Errors must not be silenced when calling a function.
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
File "./test/sample/bad_other.php" warning, line 37 - TODO: Show todos
File "./test/sample/bad_other.php" warning, line 40 - Avoid empty statements (;;).
File "./test/sample/bad_other.php" warning, line 42 - Boolean operators (&amp;&amp;) must be used instead of logical operators (AND).
File "./test/sample/bad_other.php" warning, line 42 - Empty if block.
File "./test/sample/bad_other.php" warning, line 48 - Heredoc syntax must not be used.
File "./test/sample/bad_other.php" warning, line 52 - The statement if must contain its code within a {} block.
File "./test/sample/bad_other.php" warning, line 54 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_other.php" warning, line 54 - The statement while must contain its code within a {} block.
File "./test/sample/bad_other.php" warning, line 66 - The switch statement must have a default case.
File "./test/sample/bad_other.php" warning, line 79 - The default case of a switch statement must be located after all other cases.
File "./test/sample/bad_other.php" warning, line 93 - Unary operators (++ or --) must not be used inside a control statement
File "./test/sample/bad_other.php" warning, line 95 - Assigments (=) must not be used inside a control statement.
File "./test/sample/bad_other.php" warning, line 106 - File ./test/sample/bad_other.php must not have multiple class declarations.
</system-out>
</testcase>
<testcase name="testException" file="/workspace/phpcheckstyle/test/OtherTest.php" line="31" class="OtherTest" classname="OtherTest" assertions="1" time="0.000751">
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testException
We expect 1 error
Failed asserting that 0 matches expected 1.
/workspace/phpcheckstyle/test/OtherTest.php:40</failure>
</testcase>
<testcase name="testEmpty" file="/workspace/phpcheckstyle/test/OtherTest.php" line="50" class="OtherTest" classname="OtherTest" assertions="4" time="0.000427">
<system-out>File "./test/sample/empty.php" warning, line 1 - The file ./test/sample/empty.php is empty.
</system-out>
</testcase>
<testcase name="testSwitchCaseNeedBreak" file="/workspace/phpcheckstyle/test/OtherTest.php" line="69" class="OtherTest" classname="OtherTest" assertions="4" time="0.000901">
<system-out>File "./test/sample/switch_multi_case.php" warning, line 10 - The case statement must contain a break.
</system-out>
</testcase>
</testsuite>
<testsuite name="PHPTagsTest" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001272">
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="12" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000641">
<system-out>File "./test/sample/bad_php_tags_text_after_end.php" warning, line 9 - A PHP close tag must not be included at the end of the file.
</system-out>
</testcase>
<testcase name="testClosingTagNotNeeded" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="30" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000631">
<system-out>File "./test/sample/bad_php_tags_end_not_needed.php" warning, line 1 - PHP tag should be at the beginning of the line.
</system-out>
</testcase>
</testsuite>
<testsuite name="ProhibitedTest" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000938">
<testcase name="testProhibited" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" line="13" class="ProhibitedTest" classname="ProhibitedTest" assertions="4" time="0.000938">
<system-out>File "./test/sample/bad_prohibited.php" warning, line 18 - The function exec must not be called.
File "./test/sample/bad_prohibited.php" warning, line 20 - Token T_PRINT must not be used.
</system-out>
</testcase>
</testsuite>
<testsuite name="StrictCompareTest" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.001578">
<testcase name="testStrictCompare" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" line="12" class="StrictCompareTest" classname="StrictCompareTest" assertions="4" time="0.001578">
<system-out>File "./test/sample/bad_strictcompare.php" warning, line 14 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_strictcompare.php" warning, line 19 - Consider using a strict comparison operator instead of !=.
File "./test/sample/bad_strictcompare.php" warning, line 24 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_strictcompare.php" warning, line 29 - Consider using a strict comparison operator instead of ==.
</system-out>
</testcase>
</testsuite>
<testsuite name="UnusedTest" file="/workspace/phpcheckstyle/test/UnusedTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001835">
<testcase name="testGoodUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="13" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000940"/>
<testcase name="testBadUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="32" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000895">
<system-out>File "./test/sample/bad_unused.php" warning, line 23 - Function _testUnused has unused code after RETURN.
File "./test/sample/bad_unused.php" warning, line 27 - The function _testUnused parameter $b is not used.
File "./test/sample/bad_unused.php" warning, line 18 - Unused private function: _testUnused.
File "./test/sample/bad_unused.php" warning, line 20 - Undeclared or unused variable: $c.
</system-out>
</testcase>
</testsuite>
</testsuite>
</testsuite>
</testsuites>

View File

@@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="SampleSuite" tests="6" failures="6" time="0.006">
<testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.001">
<failure type="Exception" message="Boom">/home/runner/work/repo/src/Fake.php:42</failure>
</testcase>
<testcase name="testStringFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Other.php" line="10" time="0.001">
<failure>/home/runner/work/repo/src/Other.php:10</failure>
</testcase>
<testcase name="testParenFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Paren.php" line="123" time="0.001">
<failure>at /home/runner/work/repo/src/Paren.php(123)</failure>
</testcase>
<testcase name="testWindowsFailure" classname="SampleSuite" file="C:\repo\src\Win.php" line="77" time="0.001">
<failure>C:\repo\src\Win.php:77</failure>
</testcase>
<testcase name="testWindowsParenFailure" classname="SampleSuite" file="C:\repo\src\WinParen.php" line="88" time="0.001">
<failure>at C:\repo\src\WinParen.php(88)</failure>
</testcase>
<testcase name="testPhptFailure" classname="SampleSuite" file="/home/runner/work/repo/tests/Sample.phpt" line="12" time="0.001">
<failure>/home/runner/work/repo/tests/Sample.phpt:12</failure>
</testcase>
</testsuite>
</testsuites>

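The phpunit-paths fixture above deliberately covers every failure-location shape the parser must recognize: Unix and Windows paths, `path:line` and `path(line)` suffixes, and `.phpt` files. Below is a minimal sketch of an extraction regex that handles all six cases; the names and the exact pattern are illustrative assumptions, not the action's actual implementation.

```ts
// Illustrative only: extract "<path>:<line>" or "<path>(<line>)" from a
// failure body, for Unix paths, Windows drive-letter paths, and .phpt files.
const locationRe = /((?:[A-Za-z]:\\|\/)[^\s:()]+?\.(?:php|phpt))(?::(\d+)|\((\d+)\))/

function parseFailureLocation(details: string): {path: string; line: number} | undefined {
  const match = details.match(locationRe)
  if (match === null) return undefined
  const [, path, colonLine, parenLine] = match
  return {path, line: parseInt(colonLine ?? parenLine, 10)}
}

parseFailureLocation('/home/runner/work/repo/src/Fake.php:42')        // line 42 (colon form)
parseFailureLocation('at /home/runner/work/repo/src/Paren.php(123)')  // line 123 (parenthesis form)
parseFailureLocation('at C:\\repo\\src\\WinParen.php(88)')            // Windows path
parseFailureLocation('/home/runner/work/repo/tests/Sample.phpt:12')   // .phpt file
```

The "maps absolute paths to tracked files for annotations" test later in this diff then expects each absolute path to be reduced to a repository-relative one, presumably by suffix-matching against the `trackedFiles` list.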
View File

@@ -1,79 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="CLI Arguments" tests="12" assertions="12" errors="0" failures="2" skipped="0" time="0.140397">
<testcase name="targeting-traits-with-coversclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt" assertions="1" time="0.068151">
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
</testcase>
<testcase name="targeting-traits-with-usesclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt" assertions="1" time="0.064268">
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
</testcase>
<testsuite name="PHPUnit\Event\CollectingDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" tests="2" assertions="2" errors="0" failures="0" skipped="0" time="0.004256">
<testcase name="testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="20" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.001441"/>
<testcase name="testCollectsDispatchedEventsUntilFlushed" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="27" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.002815"/>
</testsuite>
<testsuite name="PHPUnit\Event\DeferringDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.002928">
<testcase name="testCollectsEventsUntilFlush" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="22" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.001672"/>
<testcase name="testFlushesCollectedEvents" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="35" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000661"/>
<testcase name="testSubscriberCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="53" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000334"/>
<testcase name="testTracerCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="69" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000262"/>
</testsuite>
<testsuite name="PHPUnit\Event\DirectDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.000794">
<testcase name="testDispatchesEventToKnownSubscribers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="24" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000170"/>
<testcase name="testDispatchesEventToTracers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="43" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000248"/>
<testcase name="testRegisterRejectsUnknownSubscriber" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="62" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000257"/>
<testcase name="testDispatchRejectsUnknownEventType" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="73" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000119"/>
</testsuite>
</testsuite>
</testsuites>

View File

@@ -1,42 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
&gt; assert False
E assert False
tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
&gt; raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>

View File

@@ -1,27 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
<testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
<testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
<testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
<failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
]]></failure>
</testcase>
<testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
<error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
]]></error>
</testcase>
<testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
<skipped type="skip" message="skipped"/>
</testcase>
<testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
<skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
</testcase>
</testsuite>

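Both Python fixtures above, pytest's junit-xml output and the unittest-style XML, encode outcomes the same way: a `<failure>` or `<error>` child marks the test failed, a `<skipped>` child marks it skipped (pytest reuses it for xfail with `type="pytest.xfail"`), and a bare `<testcase>` passed. A hedged sketch of that mapping, with assumed intermediate types:

```ts
// Assumed parsed shape; only presence/absence of the child elements matters.
type Outcome = 'success' | 'failed' | 'skipped'

interface ParsedTestcase {
  failure?: string         // <failure> body, if any
  error?: string           // <error> body, if any
  skipped?: {type: string} // <skipped>, including pytest.xfail
}

function testcaseOutcome(tc: ParsedTestcase): Outcome {
  if (tc.failure !== undefined || tc.error !== undefined) return 'failed'
  if (tc.skipped !== undefined) return 'skipped'
  return 'success'
}
```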
View File

@@ -73,46 +73,6 @@ describe('java-junit tests', () => {
fs.writeFileSync(outputPath, report) fs.writeFileSync(outputPath, report)
}) })
it('report from testmo/junitxml basic example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report from testmo/junitxml complete example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('parses empty failures in test results', async () => { it('parses empty failures in test results', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml') const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath)) const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))

View File

@@ -207,143 +207,4 @@ describe('jest-junit tests', () => {
// Report should have the title as the first line // Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n/) expect(report).toMatch(/^# My Custom Title\n/)
}) })
it('report can be collapsed when configured', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
collapsed: 'always'
})
// Report should include collapsible details
expect(report).toContain('<details><summary>Expand for details</summary>')
expect(report).toContain('</details>')
})
it('report is not collapsed when configured to never', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
collapsed: 'never'
})
// Report should not include collapsible details
expect(report).not.toContain('<details><summary>Expand for details</summary>')
expect(report).not.toContain('</details>')
})
it('report auto-collapses when all tests pass', async () => {
// Test with a fixture that has all passing tests (no failures)
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify this fixture has no failures
expect(result.failed).toBe(0)
const report = getReport([result], {
...DEFAULT_OPTIONS,
collapsed: 'auto'
})
// Should collapse when all tests pass
expect(report).toContain('<details><summary>Expand for details</summary>')
expect(report).toContain('</details>')
})
it('report does not auto-collapse when tests fail', async () => {
// Test with a fixture that has failing tests
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify this fixture has failures
expect(result.failed).toBeGreaterThan(0)
const report = getReport([result], {
...DEFAULT_OPTIONS,
collapsed: 'auto'
})
// Should not collapse when there are failures
expect(report).not.toContain('<details><summary>Expand for details</summary>')
expect(report).not.toContain('</details>')
})
it('report includes the short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
// Report should have the title as the first line
expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
})
it('report includes a custom report title and short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport(
[result],
{
...DEFAULT_OPTIONS,
reportTitle: 'My Custom Title'
},
shortSummary
)
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
})
}) })

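The four collapsed-related tests removed above pin the rule down completely: `always` and `never` force the state, and `auto` wraps the report details in a `<details>` block only when nothing failed. As a standalone sketch (the helper name is an assumption):

```ts
// Collapse rule implied by the tests: always/never override, auto depends on failures.
function shouldCollapse(collapsed: 'auto' | 'always' | 'never', failed: number): boolean {
  if (collapsed === 'always') return true
  if (collapsed === 'never') return false
  return failed === 0 // 'auto': collapse only when every test passed
}
```

This matches the `collapsed` input description being removed from action.yml further down in this diff.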
View File

@@ -1,347 +0,0 @@
import * as fs from 'fs'
import * as path from 'path'
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'
describe('phpunit-junit tests', () => {
it('produces empty test run result when there are no test cases', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result.tests).toBe(0)
expect(result.result).toBe('success')
})
it('report from phpunit test results matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-test-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('parses nested test suites correctly', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Should have 4 test suites (3 nested ones plus the parent with direct testcases)
expect(result.suites.length).toBe(4)
// Verify suite names
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('PHPUnit\\Event\\CollectingDispatcherTest')
expect(suiteNames).toContain('PHPUnit\\Event\\DeferringDispatcherTest')
expect(suiteNames).toContain('PHPUnit\\Event\\DirectDispatcherTest')
expect(suiteNames).toContain('CLI Arguments')
// Verify total test count
expect(result.tests).toBe(12)
expect(result.passed).toBe(10)
expect(result.failed).toBe(2)
})
it('extracts error details from failures', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Find the CLI Arguments suite which has failures
const cliSuite = result.suites.find(s => s.name === 'CLI Arguments')
expect(cliSuite).toBeDefined()
// Get the failed tests
const failedTests = cliSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
expect(failedTests.length).toBe(2)
// Verify error details are captured
for (const test of failedTests) {
expect(test.error).toBeDefined()
expect(test.error!.details).toContain('Failed asserting that string matches format description')
}
})
it('maps absolute paths to tracked files for annotations', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit-paths.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: [
'src/Fake.php',
'src/Other.php',
'src/Paren.php',
'src/Win.php',
'src/WinParen.php',
'tests/Sample.phpt'
]
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const suite = result.suites.find(s => s.name === 'SampleSuite')
expect(suite).toBeDefined()
const tests = suite!.groups.flatMap(g => g.tests)
const fileFailure = tests.find(t => t.name === 'testFailure')
expect(fileFailure).toBeDefined()
expect(fileFailure!.error).toBeDefined()
expect(fileFailure!.error!.path).toBe('src/Fake.php')
expect(fileFailure!.error!.line).toBe(42)
const stringFailure = tests.find(t => t.name === 'testStringFailure')
expect(stringFailure).toBeDefined()
expect(stringFailure!.error).toBeDefined()
expect(stringFailure!.error!.path).toBe('src/Other.php')
expect(stringFailure!.error!.line).toBe(10)
const parenFailure = tests.find(t => t.name === 'testParenFailure')
expect(parenFailure).toBeDefined()
expect(parenFailure!.error).toBeDefined()
expect(parenFailure!.error!.path).toBe('src/Paren.php')
expect(parenFailure!.error!.line).toBe(123)
const windowsFailure = tests.find(t => t.name === 'testWindowsFailure')
expect(windowsFailure).toBeDefined()
expect(windowsFailure!.error).toBeDefined()
expect(windowsFailure!.error!.path).toBe('src/Win.php')
expect(windowsFailure!.error!.line).toBe(77)
const windowsParenFailure = tests.find(t => t.name === 'testWindowsParenFailure')
expect(windowsParenFailure).toBeDefined()
expect(windowsParenFailure!.error).toBeDefined()
expect(windowsParenFailure!.error!.path).toBe('src/WinParen.php')
expect(windowsParenFailure!.error!.line).toBe(88)
const phptFailure = tests.find(t => t.name === 'testPhptFailure')
expect(phptFailure).toBeDefined()
expect(phptFailure!.error).toBeDefined()
expect(phptFailure!.error!.path).toBe('tests/Sample.phpt')
expect(phptFailure!.error!.line).toBe(12)
})
it('parses junit-basic.xml with nested suites and failure', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify test counts
expect(result.tests).toBe(9)
expect(result.passed).toBe(8)
expect(result.failed).toBe(1)
expect(result.result).toBe('failed')
// Verify suites - should have Tests.Registration, Tests.Authentication.Login, and Tests.Authentication
expect(result.suites.length).toBe(3)
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('Tests.Registration')
expect(suiteNames).toContain('Tests.Authentication.Login')
expect(suiteNames).toContain('Tests.Authentication')
// Verify the Registration suite has 3 tests
const registrationSuite = result.suites.find(s => s.name === 'Tests.Registration')
expect(registrationSuite).toBeDefined()
const registrationTests = registrationSuite!.groups.flatMap(g => g.tests)
expect(registrationTests.length).toBe(3)
// Verify the Authentication suite has 3 direct tests (not counting nested suite)
const authSuite = result.suites.find(s => s.name === 'Tests.Authentication')
expect(authSuite).toBeDefined()
const authTests = authSuite!.groups.flatMap(g => g.tests)
expect(authTests.length).toBe(3)
// Verify the Login nested suite has 3 tests
const loginSuite = result.suites.find(s => s.name === 'Tests.Authentication.Login')
expect(loginSuite).toBeDefined()
const loginTests = loginSuite!.groups.flatMap(g => g.tests)
expect(loginTests.length).toBe(3)
// Verify failure is captured
const failedTest = authTests.find(t => t.name === 'testCase9')
expect(failedTest).toBeDefined()
expect(failedTest!.result).toBe('failed')
expect(failedTest!.error).toBeDefined()
expect(failedTest!.error!.message).toBe('AssertionError: Assertion error message')
})
it('parses phpcheckstyle-phpunit.xml with deeply nested suites', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify test counts from the XML: tests="30", failures="2"
expect(result.tests).toBe(30)
expect(result.passed).toBe(28)
expect(result.failed).toBe(2)
expect(result.result).toBe('failed')
// Verify the number of test suites extracted (leaf suites with testcases)
// CommentsTest, DeprecationTest, GoodTest, IndentationTest, MetricsTest,
// NamingTest, OptimizationTest, OtherTest, PHPTagsTest, ProhibitedTest,
// StrictCompareTest, UnusedTest = 12 suites
expect(result.suites.length).toBe(12)
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('CommentsTest')
expect(suiteNames).toContain('GoodTest')
expect(suiteNames).toContain('IndentationTest')
expect(suiteNames).toContain('OtherTest')
})
it('extracts test data from phpcheckstyle-phpunit.xml', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Find the CommentsTest suite
const commentsSuite = result.suites.find(s => s.name === 'CommentsTest')
expect(commentsSuite).toBeDefined()
// Verify tests are extracted correctly
const tests = commentsSuite!.groups.flatMap(g => g.tests)
expect(tests.length).toBe(3)
const testGoodDoc = tests.find(t => t.name === 'testGoodDoc')
expect(testGoodDoc).toBeDefined()
expect(testGoodDoc!.result).toBe('success')
})
it('captures failure details from phpcheckstyle-phpunit.xml', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Find the OtherTest suite which has failures
const otherSuite = result.suites.find(s => s.name === 'OtherTest')
expect(otherSuite).toBeDefined()
const failedTests = otherSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
expect(failedTests.length).toBe(2)
// Verify failure details
const testOther = failedTests.find(t => t.name === 'testOther')
expect(testOther).toBeDefined()
expect(testOther!.error).toBeDefined()
expect(testOther!.error!.details).toContain('We expect 20 warnings')
expect(testOther!.error!.details).toContain('Failed asserting that 19 matches expected 20')
const testException = failedTests.find(t => t.name === 'testException')
expect(testException).toBeDefined()
expect(testException!.error).toBeDefined()
expect(testException!.error!.details).toContain('We expect 1 error')
})
it('report from junit-basic.xml matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-junit-basic-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report from phpcheckstyle-phpunit.xml matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-phpcheckstyle-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

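The nested-suite tests removed above imply a simple flattening rule: a `<testsuite>` is reported as its own suite exactly when it has direct `<testcase>` children, and container suites are only recursed into. That is why `phpunit.xml` yields 4 suites (`CLI Arguments` has both direct testcases and nested children) and `phpcheckstyle-phpunit.xml` yields its 12 leaf suites. A sketch over assumed intermediate types:

```ts
// Assumed intermediate shape for a parsed <testsuite> element.
interface RawSuite {
  name: string
  testcases: unknown[] // direct <testcase> children
  suites: RawSuite[]   // nested <testsuite> children
}

// Depth-first walk: report a suite iff it owns testcases, recurse regardless.
function collectSuites(suite: RawSuite, out: RawSuite[] = []): RawSuite[] {
  if (suite.testcases.length > 0) out.push(suite)
  for (const child of suite.suites) collectSuites(child, out)
  return out
}
```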
View File

@@ -1,93 +0,0 @@
import * as fs from 'fs'
import * as path from 'path'
import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'
const defaultOpts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
describe('python-xunit unittest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report does not include a title by default', async () => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result])
// Report should have the badge as the first line
expect(report).toMatch(/^!\[Tests failed]/)
})
it.each([
['empty string', ''],
['space', ' '],
['tab', '\t'],
['newline', '\n']
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
reportTitle
})
// Report should have the badge as the first line
expect(report).toMatch(/^!\[Tests failed]/)
})
it('report includes a custom report title', async () => {
const parser = new PythonXunitParser(defaultOpts)
const result = await parser.parse(filePath, fileContent)
const report = getReport([result], {
...DEFAULT_OPTIONS,
reportTitle: 'My Custom Title'
})
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n/)
})
})
describe('python-xunit pytest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

View File

@@ -1,120 +0,0 @@
import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'
describe('getBadge', () => {
describe('URI encoding with special characters', () => {
it('generates correct URI with simple badge title', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'tests'
}
const badge = getBadge(5, 0, 1, options)
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-5%20passed%2C%201%20skipped-success)')
})
it('handles badge title with single hyphen', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'unit-tests'
}
const badge = getBadge(3, 0, 0, options)
// The hyphen in the badge title should be encoded as --
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/unit--tests-3%20passed-success)')
})
it('handles badge title with multiple hyphens', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'integration-api-tests'
}
const badge = getBadge(10, 0, 0, options)
// All hyphens in the title should be encoded as --
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/integration--api--tests-10%20passed-success)')
})
it('handles badge title with multiple underscores', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'my_integration_test'
}
const badge = getBadge(10, 0, 0, options)
// All underscores in the title should be encoded as __
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/my__integration__test-10%20passed-success)')
})
it('handles badge title with version format containing hyphen', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'MariaDb 12.0-ubi database tests'
}
const badge = getBadge(1, 0, 0, options)
// The hyphen in "12.0-ubi" should be encoded as --
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/MariaDb%2012.0--ubi%20database%20tests-1%20passed-success)')
})
it('handles badge title with dots and hyphens', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'v1.2.3-beta-test'
}
const badge = getBadge(4, 1, 0, options)
expect(badge).toBe('![Tests failed](https://img.shields.io/badge/v1.2.3--beta--test-4%20passed%2C%201%20failed-critical)')
})
it('preserves structural hyphens between label and message', () => {
const options: ReportOptions = {
...DEFAULT_OPTIONS,
badgeTitle: 'test-suite'
}
const badge = getBadge(2, 3, 1, options)
// The URI should have literal hyphens separating title-message-color
expect(badge).toBe('![Tests failed](https://img.shields.io/badge/test--suite-2%20passed%2C%203%20failed%2C%201%20skipped-critical)')
})
})
describe('generates test outcome as color name for imgshields', () => {
it('uses success color when all tests pass', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(5, 0, 0, options)
expect(badge).toContain('-success)')
})
it('uses critical color when tests fail', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(5, 2, 0, options)
expect(badge).toContain('-critical)')
})
it('uses yellow color when no tests found', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(0, 0, 0, options)
expect(badge).toContain('-yellow)')
})
})
describe('badge message composition', () => {
it('includes only passed count when no failures or skips', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(5, 0, 0, options)
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-5%20passed-success)')
})
it('includes passed and failed counts', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(5, 2, 0, options)
expect(badge).toBe('![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed-critical)')
})
it('includes passed, failed and skipped counts', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(5, 2, 1, options)
expect(badge).toBe('![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)')
})
it('uses "none" message when no tests', () => {
const options: ReportOptions = {...DEFAULT_OPTIONS}
const badge = getBadge(0, 0, 0, options)
expect(badge).toBe('![Tests passed successfully](https://img.shields.io/badge/tests-none-yellow)')
})
})
})

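The badge tests removed above all follow from one escaping rule for shields.io path segments: literal `-` and `_` must be doubled, because single ones act as label/message/color separators, and the rest is percent-encoded. A sketch that reproduces the expected URIs (the helper name is an assumption):

```ts
// Double '-' and '_' first (encodeURIComponent leaves both untouched),
// then percent-encode spaces, commas, etc.
function escapeBadgeSegment(text: string): string {
  return encodeURIComponent(text.replace(/-/g, '--').replace(/_/g, '__'))
}

const uri = `https://img.shields.io/badge/${escapeBadgeSegment('unit-tests')}-${escapeBadgeSegment('3 passed')}-success`
// -> https://img.shields.io/badge/unit--tests-3%20passed-success
```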
View File

@@ -32,6 +32,6 @@ describe('parseNetDuration', () => {
}) })
it('throws when string has invalid format', () => { it('throws when string has invalid format', () => {
expect(() => parseNetDuration('12:34:56 not a duration')).toThrow(/^Invalid format/) expect(() => parseNetDuration('12:34:56 not a duration')).toThrowError(/^Invalid format/)
}) })
}) })

View File

@@ -1,5 +1,6 @@
name: Test Reporter name: Test Reporter
description: Displays test results from popular testing frameworks directly in GitHub description: |
Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
author: Michal Dorner <dorner.michal@gmail.com> author: Michal Dorner <dorner.michal@gmail.com>
inputs: inputs:
artifact: artifact:
@@ -28,12 +29,9 @@ inputs:
- dotnet-nunit - dotnet-nunit
- dotnet-trx - dotnet-trx
- flutter-json - flutter-json
- golang-json
- java-junit - java-junit
- jest-junit - jest-junit
- mocha-json - mocha-json
- phpunit-junit
- python-xunit
- rspec-json - rspec-json
- swift-xunit - swift-xunit
required: true required: true
@@ -70,10 +68,6 @@ inputs:
working-directory: working-directory:
description: Relative path under $GITHUB_WORKSPACE where the repository was checked out description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
required: false required: false
report-title:
description: Title for the test report summary
required: false
default: ''
only-summary: only-summary:
description: | description: |
Allows you to generate only the summary. Allows you to generate only the summary.
@@ -91,14 +85,6 @@ inputs:
description: Customize badge title description: Customize badge title
required: false required: false
default: 'tests' default: 'tests'
collapsed:
description: |
Controls whether test report details are collapsed or expanded. Supported options:
- auto: Collapse only if all tests pass (default behavior)
- always: Always collapse the report details
- never: Always expand the report details
required: false
default: 'auto'
token: token:
description: GitHub Access Token description: GitHub Access Token
required: false required: false

108633
dist/index.js generated vendored

File diff suppressed because one or more lines are too long

8
dist/index.js.map generated vendored

File diff suppressed because one or more lines are too long

80
dist/licenses.txt generated vendored
View File

@@ -1350,62 +1350,48 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
sax sax
BlueOak-1.0.0 → ISC

Removed (Blue Oak Model License, Version 1.0.0):

# Blue Oak Model License

Version 1.0.0

## Purpose

This license gives everyone as much permission to work with
this software as possible, while protecting contributors
from liability.

## Acceptance

In order to receive this license, you must agree to its
rules. The rules of this license are both obligations
under that agreement and conditions to your license.
You must not do anything with this software that triggers
a rule that you cannot or will not follow.

## Copyright

Each contributor licenses you to do everything with this
software that would otherwise infringe that contributor's
copyright in it.

## Notices

You must ensure that everyone who gets a copy of
any part of this software from you, with or without
changes, also gets the text of this license or a link to
<https://blueoakcouncil.org/license/1.0.0>.

## Excuse

If anyone notifies you in writing that you have not
complied with [Notices](#notices), you can keep your
license by taking all practical steps to comply within 30
days after the notice. If you do not do so, your license
ends immediately.

## Patent

Each contributor licenses you to do everything with this
software that would otherwise infringe any patent claims
they can license or become able to license.

## Reliability

No contributor can revoke this license.

## No Liability

***As far as the law allows, this software comes as is,
without any warranty or condition, and no contributor
will be liable to anyone for any damages related to this
software or this license, under any kind of legal claim.***

Added (ISC license and bundled MIT notice):

The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

====

`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
License, as follows:

Copyright Mathias Bynens <https://mathiasbynens.be/>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
to-regex-range to-regex-range

3517
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{ {
"name": "test-reporter", "name": "test-reporter",
"version": "2.3.0", "version": "2.1.0",
"private": true, "private": true,
"description": "Presents test results from popular testing frameworks as Github check run", "description": "Presents test results from popular testing frameworks as Github check run",
"main": "lib/main.js", "main": "lib/main.js",
@@ -9,14 +9,13 @@
"format": "prettier --write **/*.ts", "format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts", "format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts", "lint": "eslint src/**/*.ts",
"package": "ncc build --license licenses.txt && eolConverter lf 'dist/*'", "package": "esbuild lib/main.js --bundle --sourcemap --platform=node --outfile=dist/index.js && eolConverter lf 'dist/*'",
"version": "npm run build && npm run package && git add dist/*", "version": "npm run build && npm run package && git add dist/*",
"test": "jest --ci --reporters=default --reporters=jest-junit", "test": "jest --ci --reporters=default --reporters=jest-junit",
"jest:updatesnapshot": "jest --updateSnapshot", "jest:updatesnapshot": "jest --updateSnapshot",
"all": "npm run build && npm run format && npm run lint && npm run package && npm test", "all": "npm run build && npm run format && npm run lint && npm run package && npm test",
"dart-fixture": "cd \"reports/dart\" && dart test --file-reporter=\"json:../../__tests__/fixtures/dart-json.json\"", "dart-fixture": "cd \"reports/dart\" && dart test --file-reporter=\"json:../../__tests__/fixtures/dart-json.json\"",
"dotnet-fixture": "dotnet test reports/dotnet/DotnetTests.XUnitTests --logger \"trx;LogFileName=../../../../__tests__/fixtures/dotnet-trx.trx\"", "dotnet-fixture": "dotnet test reports/dotnet/DotnetTests.XUnitTests --logger \"trx;LogFileName=../../../../__tests__/fixtures/dotnet-trx.trx\"",
"dotnet-xunitv3-fixture": "dotnet run --project reports/dotnet/DotnetTests.XUnitV3Tests/DotnetTests.XUnitV3Tests.csproj --report-trx --report-trx-filename dotnet-xunitv3.trx --results-directory __tests__/fixtures/",
"dotnet-nunit-fixture": "nunit.exe reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll --result=__tests__/fixtures/dotnet-nunit.xml", "dotnet-nunit-fixture": "nunit.exe reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll --result=__tests__/fixtures/dotnet-nunit.xml",
"dotnet-nunit-legacy-fixture": "nunit-console.exe reports/dotnet-nunit-legacy/NUnitLegacy.sln --result=__tests__/fixtures/dotnet-nunit-legacy.xml", "dotnet-nunit-legacy-fixture": "nunit-console.exe reports/dotnet-nunit-legacy/NUnitLegacy.sln --result=__tests__/fixtures/dotnet-nunit-legacy.xml",
"golang-json-fixture": "go test -v -json -timeout 5s ./reports/go | tee __tests__/fixtures/golang-json.json", "golang-json-fixture": "go test -v -json -timeout 5s ./reports/go | tee __tests__/fixtures/golang-json.json",
@@ -42,35 +41,33 @@
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"fast-glob": "^3.3.3", "fast-glob": "^3.3.3",
"got": "^11.8.6", "got": "^11.8.6",
"picomatch": "^4.0.3", "picomatch": "^4.0.2",
"xml2js": "^0.6.2" "xml2js": "^0.6.2"
}, },
"devDependencies": { "devDependencies": {
"@octokit/webhooks-types": "^7.6.1", "@octokit/webhooks-types": "^7.6.1",
"@types/adm-zip": "^0.5.7", "@types/adm-zip": "^0.5.7",
"@types/jest": "^30.0.0", "@types/jest": "^29.5.14",
"@types/node": "^20.19.23", "@types/node": "^20.19.0",
"@types/picomatch": "^4.0.2", "@types/picomatch": "^2.3.4",
"@types/xml2js": "^0.4.14", "@types/xml2js": "^0.4.14",
"@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/eslint-plugin": "^7.18.0",
"@typescript-eslint/parser": "^7.18.0", "@typescript-eslint/parser": "^7.18.0",
"@vercel/ncc": "^0.38.4",
"eol-converter-cli": "^1.1.0", "eol-converter-cli": "^1.1.0",
"esbuild": "0.25.5",
"eslint": "^8.57.1", "eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.10.1", "eslint-import-resolver-typescript": "^3.10.1",
"eslint-plugin-github": "^4.10.2", "eslint-plugin-github": "^4.10.2",
"eslint-plugin-import": "^2.32.0", "eslint-plugin-import": "^2.31.0",
"eslint-plugin-jest": "^28.14.0", "eslint-plugin-jest": "^28.13.0",
"eslint-plugin-prettier": "^5.5.4", "eslint-plugin-prettier": "^5.4.1",
"jest": "^30.2.0", "jest": "^29.7.0",
"jest-circus": "^29.7.0",
"jest-junit": "^16.0.0", "jest-junit": "^16.0.0",
"js-yaml": "^4.1.1", "js-yaml": "^4.1.0",
"prettier": "^3.6.2", "prettier": "^3.5.3",
"ts-jest": "^29.4.5", "ts-jest": "^29.3.4",
"typescript": "^5.9.3" "typescript": "^5.8.3"
},
"overrides": {
"sax": "^1.4.3"
}, },
"jest-junit": { "jest-junit": {
"suiteName": "jest tests", "suiteName": "jest tests",
@@ -84,10 +81,5 @@
}, },
"engines": { "engines": {
"node": ">=20" "node": ">=20"
},
"markdownlint-cli2": {
"ignores": [
"__tests__/**/*"
]
} }
} }

View File

@@ -40,7 +40,7 @@ namespace DotnetTests.XUnitTests
} }
[Test] [Test]
[CancelAfter(1)] [Timeout(1)]
public void Timeout_Test() public void Timeout_Test()
{ {
Thread.Sleep(100); Thread.Sleep(100);
@@ -58,7 +58,7 @@ namespace DotnetTests.XUnitTests
[TestCase(3)] [TestCase(3)]
public void Is_Even_Number(int i) public void Is_Even_Number(int i)
{ {
Assert.That(i % 2 == 0); Assert.True(i % 2 == 0);
} }
} }
} }

View File

@@ -1,13 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net8.0</TargetFramework> <TargetFramework>netcoreapp3.1</TargetFramework>
<DeterministicSourcePaths>true</DeterministicSourcePaths>
<IsPackable>false</IsPackable>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="NUnit" Version="4.3.2" /> <PackageReference Include="NUnit" Version="3.13.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" /> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@@ -2,7 +2,6 @@
<PropertyGroup> <PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework> <TargetFramework>netstandard2.0</TargetFramework>
<DeterministicSourcePaths>true</DeterministicSourcePaths>
</PropertyGroup> </PropertyGroup>
</Project> </Project>

View File

@@ -1,14 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net8.0</TargetFramework> <TargetFramework>netcoreapp3.1</TargetFramework>
<DeterministicSourcePaths>true</DeterministicSourcePaths>
<IsPackable>false</IsPackable>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" /> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
<PackageReference Include="xunit" Version="2.9.3" /> <PackageReference Include="xunit" Version="2.4.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.1" /> <PackageReference Include="xunit.runner.visualstudio" Version="2.4.0" />
<PackageReference Include="coverlet.collector" Version="1.2.0" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@@ -1,15 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<OutputType>exe</OutputType>
<DeterministicSourcePaths>true</DeterministicSourcePaths>
<UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Testing.Extensions.TrxReport" Version="1.7.3" />
<PackageReference Include="xunit.v3" Version="2.0.3" />
</ItemGroup>
</Project>

View File

@@ -1,27 +0,0 @@
using System;
using Xunit;
namespace DotnetTests.XUnitV3Tests;
public sealed class Fixture : IDisposable
{
public void Dispose()
{
throw new InvalidOperationException("Failure during fixture disposal");
}
}
public class FixtureTests(Fixture fixture) : IClassFixture<Fixture>
{
[Fact]
public void Passing_Test()
{
Assert.NotNull(fixture);
}
[Fact]
public void Failing_Test()
{
Assert.Null(fixture);
}
}

View File

@@ -11,8 +11,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DotnetTests.XUnitTests", "D
EndProject EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.NUnitV3Tests", "DotnetTests.NUnitV3Tests\DotnetTests.NUnitV3Tests.csproj", "{81023ED7-56CB-47E9-86C5-9125A0873C55}" Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.NUnitV3Tests", "DotnetTests.NUnitV3Tests\DotnetTests.NUnitV3Tests.csproj", "{81023ED7-56CB-47E9-86C5-9125A0873C55}"
EndProject EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.XUnitV3Tests", "DotnetTests.XUnitV3Tests\DotnetTests.XUnitV3Tests.csproj", "{D35E65DC-62EF-4612-9FF3-66F5600BFB74}"
EndProject
Global Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU Debug|Any CPU = Debug|Any CPU
@@ -31,10 +29,6 @@ Global
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Debug|Any CPU.Build.0 = Debug|Any CPU {81023ED7-56CB-47E9-86C5-9125A0873C55}.Debug|Any CPU.Build.0 = Debug|Any CPU
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.ActiveCfg = Release|Any CPU {81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.ActiveCfg = Release|Any CPU
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.Build.0 = Release|Any CPU {81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.Build.0 = Release|Any CPU
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection EndGlobalSection
GlobalSection(SolutionProperties) = preSolution GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE HideSolutionNode = FALSE
@@ -42,7 +36,6 @@ Global
GlobalSection(NestedProjects) = preSolution GlobalSection(NestedProjects) = preSolution
{F8607EDB-D25D-47AA-8132-38ACA242E845} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C} {F8607EDB-D25D-47AA-8132-38ACA242E845} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
{81023ED7-56CB-47E9-86C5-9125A0873C55} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C} {81023ED7-56CB-47E9-86C5-9125A0873C55} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
{D35E65DC-62EF-4612-9FF3-66F5600BFB74} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
EndGlobalSection EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {6ED5543C-74AA-4B21-8050-943550F3F66E} SolutionGuid = {6ED5543C-74AA-4B21-8050-943550F3F66E}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -9,6 +9,6 @@
"author": "Michal Dorner <dorner.michal@gmail.com>", "author": "Michal Dorner <dorner.michal@gmail.com>",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"mocha": "^11.7.5" "mocha": "^8.3.0"
} }
} }

View File

@@ -17,10 +17,9 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
 import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
 import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
 import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
-import {PhpunitJunitParser} from './parsers/phpunit-junit/phpunit-junit-parser'
-import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
 import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
 import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
 import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
 import {getCheckRunContext} from './utils/github-utils'
@@ -50,7 +49,6 @@ class TestReporter {
 readonly useActionsSummary = core.getInput('use-actions-summary', {required: false}) === 'true'
 readonly badgeTitle = core.getInput('badge-title', {required: false})
 readonly reportTitle = core.getInput('report-title', {required: false})
-readonly collapsed = core.getInput('collapsed', {required: false}) as 'auto' | 'always' | 'never'
 readonly token = core.getInput('token', {required: true})
 readonly octokit: InstanceType<typeof GitHub>
 readonly context = getCheckRunContext()
@@ -68,11 +66,6 @@
 return
 }
-if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
-core.setFailed(`Input parameter 'collapsed' has invalid value`)
-return
-}
 if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {
 core.setFailed(`Input parameter 'max-annotations' has invalid value`)
 return
@@ -173,29 +166,19 @@
 }
 }
-const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed} = this
-const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
-const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
-const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0)
-const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `
+const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle} = this
 let baseUrl = ''
 if (this.useActionsSummary) {
-const summary = getReport(
-results,
-{
-listSuites,
-listTests,
-baseUrl,
-onlySummary,
-useActionsSummary,
-badgeTitle,
-reportTitle,
-collapsed
-},
-shortSummary
-)
+const summary = getReport(results, {
+listSuites,
+listTests,
+baseUrl,
+onlySummary,
+useActionsSummary,
+badgeTitle,
+reportTitle
+})
 core.info('Summary content:')
 core.info(summary)
@@ -222,8 +205,7 @@
 onlySummary,
 useActionsSummary,
 badgeTitle,
-reportTitle,
-collapsed
+reportTitle
 })
 core.info('Creating annotations')
@@ -232,6 +214,11 @@
 const isFailed = this.failOnError && results.some(tr => tr.result === 'failed')
 const conclusion = isFailed ? 'failure' : 'success'
+const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
+const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
+const skipped = results.reduce((sum, tr) => sum + tr.skipped, 0)
+const shortSummary = `${passed} passed, ${failed} failed and ${skipped} skipped `
 core.info(`Updating check run conclusion (${conclusion}) and output`)
 const resp = await this.octokit.rest.checks.update({
 check_run_id: createResp.data.id,
@@ -272,10 +259,6 @@
 return new JestJunitParser(options)
 case 'mocha-json':
 return new MochaJsonParser(options)
-case 'phpunit-junit':
-return new PhpunitJunitParser(options)
-case 'python-xunit':
-return new PythonXunitParser(options)
 case 'rspec-json':
 return new RspecJsonParser(options)
 case 'swift-xunit':
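The `collapsed` input removed above is read as a plain string and narrowed to a union type only after an explicit runtime check. A standalone sketch of that validate-then-narrow pattern (names are illustrative, not repo code):

```ts
type Collapsed = 'auto' | 'always' | 'never'

// Illustrative only: action inputs arrive as plain strings, so the value
// must be checked at runtime before it can safely be treated as the union.
function parseCollapsed(raw: string): Collapsed {
  if (raw !== 'auto' && raw !== 'always' && raw !== 'never') {
    throw new Error(`Input parameter 'collapsed' has invalid value`)
  }
  return raw // the checks above narrow 'raw' to Collapsed
}
```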


@@ -242,13 +242,13 @@ export class DartJsonParser implements TestParser {
 private getRelativePath(path: string): string {
 const prefix = 'file://'
 if (path.startsWith(prefix)) {
-path = path.substring(prefix.length)
+path = path.substr(prefix.length)
 }
 path = normalizeFilePath(path)
 const workDir = this.getWorkDir(path)
 if (workDir !== undefined && path.startsWith(workDir)) {
-path = path.substring(workDir.length)
+path = path.substr(workDir.length)
 }
 return path
 }
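The only change in this hunk (and in several parsers below) is `substring` giving way to the deprecated `substr`. With a single argument the two are interchangeable, which is why the swap is behavior-preserving here; a standalone sketch of the distinction (example values are made up):

```ts
const path = 'file:///home/runner/work/repo/test/main_test.dart'
const prefix = 'file://'

// With one argument, both return the tail starting at the given index.
console.log(path.substring(prefix.length)) // '/home/runner/work/repo/test/main_test.dart'
console.log(path.substr(prefix.length))    // same output; substr is deprecated

// They diverge only with a second argument: end index vs. length.
console.log('abcdef'.substring(1, 3)) // 'bc'
console.log('abcdef'.substr(1, 3))    // 'bcd'
```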


@@ -77,13 +77,13 @@ export class DotnetNunitParser implements TestParser {
 .join('.')
 const groupName = suitesWithoutTheories[suitesWithoutTheories.length - 1].$.name
-let existingSuite = result.find(suite => suite.name === suiteName)
+let existingSuite = result.find(existingSuite => existingSuite.name === suiteName)
 if (existingSuite === undefined) {
 existingSuite = new TestSuiteResult(suiteName, [])
 result.push(existingSuite)
 }
-let existingGroup = existingSuite.groups.find(group => group.name === groupName)
+let existingGroup = existingSuite.groups.find(existingGroup => existingGroup.name === groupName)
 if (existingGroup === undefined) {
 existingGroup = new TestGroupResult(groupName, [])
 existingSuite.groups.push(existingGroup)
@@ -136,7 +136,7 @@ export class DotnetNunitParser implements TestParser {
 path = normalizeFilePath(path)
 const workDir = this.getWorkDir(path)
 if (workDir !== undefined && path.startsWith(workDir)) {
-path = path.substring(workDir.length)
+path = path.substr(workDir.length)
 }
 return path
 }
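The first hunk above only renames the `find` callback parameters: on the added lines the parameter reuses the name of the outer variable it is assigned to, which is legal shadowing but easy to misread. A minimal sketch (types and data are made up):

```ts
interface Suite { name: string }
const result: Suite[] = [{name: 'A'}, {name: 'B'}]
const suiteName = 'B'

// Added lines: the callback parameter shadows the outer 'existingSuite'.
let existingSuite = result.find(existingSuite => existingSuite.name === suiteName)

// Removed lines: renaming the parameter avoids the shadow, same behavior.
existingSuite = result.find(suite => suite.name === suiteName)
```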


@@ -81,7 +81,7 @@ export class DotnetTrxParser implements TestParser {
 const testClasses: {[name: string]: TestClass} = {}
 for (const r of unitTestsResults) {
-const className = r.test.TestMethod[0].$.className ?? "Unclassified"
+const className = r.test.TestMethod[0].$.className
 let tc = testClasses[className]
 if (tc === undefined) {
 tc = new TestClass(className)
@@ -94,7 +94,7 @@ export class DotnetTrxParser implements TestParser {
 const resultTestName = r.result.$.testName
 const testName =
 resultTestName.startsWith(className) && resultTestName[className.length] === '.'
-? resultTestName.substring(className.length + 1)
+? resultTestName.substr(className.length + 1)
 : resultTestName
 const test = new Test(testName, r.result.$.outcome, duration, error)
@@ -146,8 +146,8 @@ export class DotnetTrxParser implements TestParser {
 return undefined
 }
-const message = test.error.Message[0]
 const stackTrace = test.error.StackTrace[0]
+const message = `${test.error.Message[0]}\n${stackTrace}`
 let path
 let line
@@ -161,7 +161,7 @@
 path,
 line,
 message,
-details: `${message}`
+details: `${message}\n${stackTrace}`
 }
 }
@@ -177,7 +177,7 @@
 const filePath = normalizeFilePath(fileStr)
 const workDir = this.getWorkDir(filePath)
 if (workDir) {
-const file = filePath.substring(workDir.length)
+const file = filePath.substr(workDir.length)
 if (trackedFiles.includes(file)) {
 const line = parseInt(lineStr)
 return {path: file, line}
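The prefix-strip in the second hunk turns a fully qualified TRX test name into a short one only when the class name is a real dotted prefix. A standalone sketch with made-up names:

```ts
const className = 'DotnetTests.XUnitTests.CalculatorTests'
const resultTestName = 'DotnetTests.XUnitTests.CalculatorTests.Passing_Test'

// Strip 'ClassName.' only when the class name is followed by a literal dot.
const testName =
  resultTestName.startsWith(className) && resultTestName[className.length] === '.'
    ? resultTestName.substring(className.length + 1)
    : resultTestName

console.log(testName) // 'Passing_Test'
```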


@@ -106,7 +106,7 @@ export class JestJunitParser implements TestParser {
 path = normalizeFilePath(path)
 const workDir = this.getWorkDir(path)
 if (workDir !== undefined && path.startsWith(workDir)) {
-path = path.substring(workDir.length)
+path = path.substr(workDir.length)
 }
 return path
 }


@@ -61,7 +61,7 @@ export class MochaJsonParser implements TestParser {
 private processTest(suite: TestSuiteResult, test: MochaJsonTest, result: TestExecutionResult): void {
 const groupName =
 test.fullTitle !== test.title
-? test.fullTitle.substring(0, test.fullTitle.length - test.title.length).trimEnd()
+? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()
 : null
 let group = suite.groups.find(grp => grp.name === groupName)
@@ -103,7 +103,7 @@
 path = normalizeFilePath(path)
 const workDir = this.getWorkDir(path)
 if (workDir !== undefined && path.startsWith(workDir)) {
-path = path.substring(workDir.length)
+path = path.substr(workDir.length)
 }
 return path
 }
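The grouping rule in the first hunk (and in the rspec-json parser further down) derives the group name by cutting the test's own title off the end of its full title. A standalone sketch with made-up titles:

```ts
const fullTitle = 'Calculator #add returns the sum'
const title = 'returns the sum'

// Group name = full title minus the trailing test title, trimmed.
const groupName =
  fullTitle !== title
    ? fullTitle.substring(0, fullTitle.length - title.length).trimEnd()
    : null

console.log(groupName) // 'Calculator #add'
```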


@@ -1,258 +0,0 @@
import {ParseOptions, TestParser} from '../../test-parser'
import {parseStringPromise} from 'xml2js'
import {PhpunitReport, SingleSuiteReport, TestCase, TestSuite} from './phpunit-junit-types'
import {getBasePath, normalizeFilePath} from '../../utils/path-utils'
import {
TestExecutionResult,
TestRunResult,
TestSuiteResult,
TestGroupResult,
TestCaseResult,
TestCaseError
} from '../../test-results'
export class PhpunitJunitParser implements TestParser {
readonly trackedFiles: Set<string>
readonly trackedFilesList: string[]
private assumedWorkDir: string | undefined
constructor(readonly options: ParseOptions) {
this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
this.trackedFiles = new Set(this.trackedFilesList)
}
async parse(filePath: string, content: string): Promise<TestRunResult> {
const reportOrSuite = await this.getPhpunitReport(filePath, content)
const isReport = (reportOrSuite as PhpunitReport).testsuites !== undefined
// XML might contain:
// - multiple suites under <testsuites> root node
// - single <testsuite> as root node
let report: PhpunitReport
if (isReport) {
report = reportOrSuite as PhpunitReport
} else {
// Make it behave the same way as if suite was inside <testsuites> root node
const suite = (reportOrSuite as SingleSuiteReport).testsuite
report = {
testsuites: {
$: {time: suite.$.time},
testsuite: [suite]
}
}
}
return this.getTestRunResult(filePath, report)
}
private async getPhpunitReport(filePath: string, content: string): Promise<PhpunitReport | SingleSuiteReport> {
try {
return await parseStringPromise(content)
} catch (e) {
throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
}
}
private getTestRunResult(filePath: string, report: PhpunitReport): TestRunResult {
const suites: TestSuiteResult[] = []
this.collectSuites(suites, report.testsuites.testsuite ?? [])
const seconds = parseFloat(report.testsuites.$?.time ?? '')
const time = isNaN(seconds) ? undefined : seconds * 1000
return new TestRunResult(filePath, suites, time)
}
private collectSuites(results: TestSuiteResult[], testsuites: TestSuite[]): void {
for (const ts of testsuites) {
// Recursively process nested test suites first (depth-first)
if (ts.testsuite) {
this.collectSuites(results, ts.testsuite)
}
// Only add suites that have direct test cases
// This avoids adding container suites that only hold nested suites
if (ts.testcase && ts.testcase.length > 0) {
const name = ts.$.name.trim()
const time = parseFloat(ts.$.time) * 1000
results.push(new TestSuiteResult(name, this.getGroups(ts), time))
}
}
}
private getGroups(suite: TestSuite): TestGroupResult[] {
if (!suite.testcase || suite.testcase.length === 0) {
return []
}
const groups: {name: string; tests: TestCase[]}[] = []
for (const tc of suite.testcase) {
// Use classname (PHPUnit style) for grouping
// If classname matches suite name, use empty string to avoid redundancy
const className = tc.$.classname ?? tc.$.class ?? ''
const groupName = className === suite.$.name ? '' : className
let grp = groups.find(g => g.name === groupName)
if (grp === undefined) {
grp = {name: groupName, tests: []}
groups.push(grp)
}
grp.tests.push(tc)
}
return groups.map(grp => {
const tests = grp.tests.map(tc => {
const name = tc.$.name.trim()
const result = this.getTestCaseResult(tc)
const time = parseFloat(tc.$.time) * 1000
const error = this.getTestCaseError(tc)
return new TestCaseResult(name, result, time, error)
})
return new TestGroupResult(grp.name, tests)
})
}
private getTestCaseResult(test: TestCase): TestExecutionResult {
if (test.failure || test.error) return 'failed'
if (test.skipped) return 'skipped'
return 'success'
}
private getTestCaseError(tc: TestCase): TestCaseError | undefined {
if (!this.options.parseErrors) {
return undefined
}
// We process <error> and <failure> the same way
const failures = tc.failure ?? tc.error
if (!failures || failures.length === 0) {
return undefined
}
const failure = failures[0]
const details = typeof failure === 'string' ? failure : failure._ ?? ''
// PHPUnit provides file path directly in testcase attributes
let filePath: string | undefined
let line: number | undefined
if (tc.$.file) {
const relativePath = this.getRelativePath(tc.$.file)
if (this.trackedFiles.has(relativePath)) {
filePath = relativePath
}
if (tc.$.line) {
line = parseInt(tc.$.line)
}
}
// If file not in tracked files, try to extract from error details
if (!filePath && details) {
const extracted = this.extractFileAndLine(details)
if (extracted) {
filePath = extracted.filePath
line = extracted.line
}
}
let message: string | undefined
if (typeof failure !== 'string' && failure.$) {
message = failure.$.message
if (failure.$.type) {
message = message ? `${failure.$.type}: ${message}` : failure.$.type
}
}
return {
path: filePath,
line,
details,
message
}
}
private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
// PHPUnit stack traces typically have format: /path/to/file.php:123
const lines = details.split(/\r?\n/)
for (const str of lines) {
// Match patterns like /path/to/file.php:123 or at /path/to/file.php(123)
const matchColon = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
if (matchColon) {
const relativePath = this.getRelativePath(matchColon[1])
if (this.trackedFiles.has(relativePath)) {
return {filePath: relativePath, line: parseInt(matchColon[2])}
}
}
const matchParen = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt))\((\d+)\)/)
if (matchParen) {
const relativePath = this.getRelativePath(matchParen[1])
if (this.trackedFiles.has(relativePath)) {
return {filePath: relativePath, line: parseInt(matchParen[2])}
}
}
}
return undefined
}
/**
* Converts an absolute file path to a relative path by stripping the working directory prefix.
*
* @param path - The absolute file path from PHPUnit output (e.g., `/home/runner/work/repo/src/Test.php`)
* @returns The relative path (e.g., `src/Test.php`) if a working directory can be determined,
* otherwise returns the normalized original path
*/
private getRelativePath(path: string): string {
path = normalizeFilePath(path)
const workDir = this.getWorkDir(path)
if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substring(workDir.length)
}
return path
}
/**
* Determines the working directory prefix to strip from absolute file paths.
*
* The working directory is resolved using the following priority:
*
* 1. **Explicit configuration** - If `options.workDir` is set, it takes precedence.
* This allows users to explicitly specify the working directory.
*
* 2. **Cached assumption** - If we've previously determined a working directory
* (`assumedWorkDir`) and the current path starts with it, we reuse that value.
* This avoids redundant computation for subsequent paths.
*
* 3. **Heuristic detection** - Uses `getBasePath()` to find the common prefix between
* the absolute path and the list of tracked files in the repository. For example:
* - Absolute path: `/home/runner/work/repo/src/Test.php`
* - Tracked file: `src/Test.php`
* - Detected workDir: `/home/runner/work/repo/`
*
* Once detected, the working directory is cached in `assumedWorkDir` for efficiency.
*
* @param path - The normalized absolute file path to analyze
* @returns The working directory prefix (with trailing slash), or `undefined` if it cannot be determined
*
* @example
* // With tracked file 'src/Foo.php' and path '/home/runner/work/repo/src/Foo.php'
* // Returns: '/home/runner/work/repo/'
*/
private getWorkDir(path: string): string | undefined {
if (this.options.workDir) {
return this.options.workDir
}
if (this.assumedWorkDir && path.startsWith(this.assumedWorkDir)) {
return this.assumedWorkDir
}
const basePath = getBasePath(path, this.trackedFilesList)
if (basePath !== undefined) {
this.assumedWorkDir = basePath
}
return basePath
}
}
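For reference, the colon-style pattern in the deleted parser's `extractFileAndLine` matches PHPUnit stack-trace lines of the form `/path/to/file.php:123`. A standalone sketch (path and line number are made up):

```ts
const traceLine = '/home/runner/work/repo/tests/CalculatorTest.php:42'

// Same regex as above: optional Windows drive, a .php/.phpt path, ':', digits.
const match = traceLine.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)

if (match) {
  console.log(match[1]) // '/home/runner/work/repo/tests/CalculatorTest.php'
  console.log(match[2]) // '42'
}
```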


@@ -1,52 +0,0 @@
export interface PhpunitReport {
testsuites: TestSuites
}
export interface SingleSuiteReport {
testsuite: TestSuite
}
export interface TestSuites {
$?: {
time?: string
}
testsuite?: TestSuite[]
}
export interface TestSuite {
$: {
name: string
tests?: string
assertions?: string
errors?: string
failures?: string
skipped?: string
time: string
file?: string
}
testcase?: TestCase[]
testsuite?: TestSuite[]
}
export interface TestCase {
$: {
name: string
class?: string
classname?: string
file?: string
line?: string
assertions?: string
time: string
}
failure?: Failure[]
error?: Failure[]
skipped?: string[]
}
export interface Failure {
_: string
$?: {
type?: string
message?: string
}
}
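These interfaces mirror xml2js output with default options: attributes land under `$`, element text under `_`, and child elements become arrays. A minimal sketch of that mapping (the sample XML is made up):

```ts
import {parseStringPromise} from 'xml2js'

const xml =
  '<testcase name="testAdd" time="0.010">' +
  '<failure type="AssertionError">assert failed</failure>' +
  '</testcase>'

const parsed = await parseStringPromise(xml) // top-level await in an ES module
console.log(parsed.testcase.$.name)            // 'testAdd'
console.log(parsed.testcase.failure[0]._)      // 'assert failed'
console.log(parsed.testcase.failure[0].$.type) // 'AssertionError'
```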


@@ -1,8 +0,0 @@
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'
export class PythonXunitParser extends JavaJunitParser {
constructor(readonly options: ParseOptions) {
super(options)
}
}


@@ -55,7 +55,7 @@ export class RspecJsonParser implements TestParser {
 private processTest(suite: TestSuiteResult, test: RspecExample, result: TestExecutionResult): void {
 const groupName =
 test.full_description !== test.description
-? test.full_description.substring(0, test.full_description.length - test.description.length).trimEnd()
+? test.full_description.substr(0, test.full_description.length - test.description.length).trimEnd()
 : null
 let group = suite.groups.find(grp => grp.name === groupName)


@@ -16,7 +16,6 @@ export interface ReportOptions {
 useActionsSummary: boolean
 badgeTitle: string
 reportTitle: string
-collapsed: 'auto' | 'always' | 'never'
 }
 export const DEFAULT_OPTIONS: ReportOptions = {
@@ -26,19 +25,16 @@
 onlySummary: false,
 useActionsSummary: true,
 badgeTitle: 'tests',
-reportTitle: '',
-collapsed: 'auto'
+reportTitle: ''
 }
-export function getReport(
-results: TestRunResult[],
-options: ReportOptions = DEFAULT_OPTIONS,
-shortSummary = ''
-): string {
+export function getReport(results: TestRunResult[], options: ReportOptions = DEFAULT_OPTIONS): string {
+core.info('Generating check run summary')
 applySort(results)
 const opts = {...options}
-let lines = renderReport(results, opts, shortSummary)
+let lines = renderReport(results, opts)
 let report = lines.join('\n')
 if (getByteLength(report) <= getMaxReportLength(options)) {
@@ -48,7 +44,7 @@ export function getReport(
 if (opts.listTests === 'all') {
 core.info("Test report summary is too big - setting 'listTests' to 'failed'")
 opts.listTests = 'failed'
-lines = renderReport(results, opts, shortSummary)
+lines = renderReport(results, opts)
 report = lines.join('\n')
 if (getByteLength(report) <= getMaxReportLength(options)) {
 return report
@@ -105,7 +101,7 @@
 return Buffer.byteLength(text, 'utf8')
 }
-function renderReport(results: TestRunResult[], options: ReportOptions, shortSummary: string): string[] {
+function renderReport(results: TestRunResult[], options: ReportOptions): string[] {
 const sections: string[] = []
 const reportTitle: string = options.reportTitle.trim()
@@ -113,10 +109,6 @@ function renderReport(results: TestRunResult[], options: ReportOptions, shortSum
 sections.push(`# ${reportTitle}`)
 }
-if (shortSummary) {
-sections.push(`## ${shortSummary}`)
-}
 const badge = getReportBadge(results, options)
 sections.push(badge)
@@ -133,7 +125,7 @@ function getReportBadge(results: TestRunResult[], options: ReportOptions): strin
 return getBadge(passed, failed, skipped, options)
 }
-export function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
+function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
 const text = []
 if (passed > 0) {
 text.push(`${passed} passed`)
@@ -153,37 +145,28 @@ export function getBadge(passed: number, failed: number, skipped: number, option
 color = 'yellow'
 }
 const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully'
-const encodedBadgeTitle = encodeImgShieldsURIComponent(options.badgeTitle)
-const encodedMessage = encodeImgShieldsURIComponent(message)
-const encodedColor = encodeImgShieldsURIComponent(color)
-return `![${hint}](https://img.shields.io/badge/${encodedBadgeTitle}-${encodedMessage}-${encodedColor})`
+const uri = encodeURIComponent(`${options.badgeTitle}-${message}-${color}`)
+return `![${hint}](https://img.shields.io/badge/${uri})`
 }
 function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): string[] {
 const sections: string[] = []
 const totalFailed = testRuns.reduce((sum, tr) => sum + tr.failed, 0)
-// Determine if report should be collapsed based on collapsed option
-const shouldCollapse = options.collapsed === 'always' || (options.collapsed === 'auto' && totalFailed === 0)
-if (shouldCollapse) {
+if (totalFailed === 0) {
 sections.push(`<details><summary>Expand for details</summary>`)
 sections.push(` `)
 }
 if (testRuns.length > 0 || options.onlySummary) {
 const tableData = testRuns
-.map((tr, originalIndex) => ({tr, originalIndex}))
-.filter(({tr}) => tr.passed > 0 || tr.failed > 0 || tr.skipped > 0)
-.map(({tr, originalIndex}) => {
+.filter(tr => tr.passed > 0 || tr.failed > 0 || tr.skipped > 0)
+.map(tr => {
 const time = formatTime(tr.time)
 const name = tr.path
-const addr = options.baseUrl + makeRunSlug(originalIndex, options).link
-const nameLink = link(name, addr)
 const passed = tr.passed > 0 ? `${tr.passed} ${Icon.success}` : ''
 const failed = tr.failed > 0 ? `${tr.failed} ${Icon.fail}` : ''
 const skipped = tr.skipped > 0 ? `${tr.skipped} ${Icon.skip}` : ''
-return [nameLink, passed, failed, skipped, time]
+return [name, passed, failed, skipped, time]
 })
 const resultsTable = table(
@@ -199,7 +182,7 @@ function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): s
 sections.push(...suitesReports)
 }
-if (shouldCollapse) {
+if (totalFailed === 0) {
 sections.push(`</details>`)
 }
 return sections
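The collapse rule removed in this hunk folds the whole report into a `<details>` block either on demand or, in `auto` mode, only when nothing failed. A standalone sketch of the predicate:

```ts
type Collapsed = 'auto' | 'always' | 'never'

function shouldCollapse(collapsed: Collapsed, totalFailed: number): boolean {
  return collapsed === 'always' || (collapsed === 'auto' && totalFailed === 0)
}

console.log(shouldCollapse('auto', 0))   // true (all green, tuck details away)
console.log(shouldCollapse('auto', 3))   // false (failures stay expanded)
console.log(shouldCollapse('always', 3)) // true
console.log(shouldCollapse('never', 0))  // false
```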
@@ -277,9 +260,6 @@ function getTestsReport(ts: TestSuiteResult, runIndex: number, suiteIndex: numbe
 }
 const space = grp.name ? ' ' : ''
 for (const tc of grp.tests) {
-if (options.listTests === 'failed' && tc.result !== 'failed') {
-continue
-}
 const result = getResultIcon(tc.result)
 sections.push(`${space}${result} ${tc.name}`)
 if (tc.error) {
@@ -319,7 +299,3 @@ function getResultIcon(result: TestExecutionResult): string {
 return ''
 }
 }
-function encodeImgShieldsURIComponent(component: string): string {
-return encodeURIComponent(component).replace(/-/g, '--').replace(/_/g, '__')
-}
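The helper removed at the end of this file escapes badge segments for shields.io, where `-` separates label, message and color, so literal dashes and underscores must be doubled. A standalone sketch of the rule:

```ts
function encodeImgShieldsURIComponent(component: string): string {
  // shields.io static badges: '--' renders a literal '-', '__' a literal '_'
  return encodeURIComponent(component).replace(/-/g, '--').replace(/_/g, '__')
}

console.log(encodeImgShieldsURIComponent('unit-tests')) // 'unit--tests'
console.log(encodeImgShieldsURIComponent('my_suite'))   // 'my__suite'
```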


@@ -36,7 +36,7 @@ export function ellipsis(text: string, maxLength: number): string {
 return text
 }
-return text.substring(0, maxLength - 3) + '...'
+return text.substr(0, maxLength - 3) + '...'
 }
 export function formatTime(ms: number): string {


@@ -34,6 +34,6 @@ export function getBasePath(path: string, trackedFiles: string[]): string | unde
 return undefined
 }
-const base = path.substring(0, path.length - max.length)
+const base = path.substr(0, path.length - max.length)
 return base
 }
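For context, `getBasePath` appears to pick the tracked file that forms the longest suffix of the absolute path (held in `max` above); stripping that suffix leaves the working directory. A standalone sketch with made-up paths, under that assumption:

```ts
const path = '/home/runner/work/repo/src/utils/math.ts'
const max = 'src/utils/math.ts' // assumed: longest tracked file that suffixes 'path'

const base = path.substring(0, path.length - max.length)
console.log(base) // '/home/runner/work/repo/'
```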