Compare commits

...

36 Commits

Author SHA1 Message Date
Jozef Izso
055bc8c025 Rebuild the dist/index.js file 2025-12-29 15:06:29 +01:00
Jozef Izso
17c900ba4e Use String.substring() function instead of the deprecated String.substr() 2025-12-29 15:06:16 +01:00
Jozef Izso
ff2d13cc36 Merge pull request #422 from mbeccati/phpunit-support 2025-12-29 14:59:02 +01:00
Jozef Izso
20823bb69a Merge pull request #701 from dorny/feature/junit-xml-samples 2025-12-29 14:56:09 +01:00
Jozef Izso
0be3971fec Rebuild the dist/index.js file 2025-12-29 14:41:25 +01:00
Jozef Izso
4ee97617f7 Document the behavior of getRelativePath() and getWorkDir() functions
Co-Authored-By: Claude Code <noreply@anthropic.com>
2025-12-29 14:36:48 +01:00
Jozef Izso
a97700c53c Include tests for parsing files names and line numbers in the PhpunitJunitParser
Co-Authored-By: Codex <codex@openai.com>
2025-12-29 14:25:10 +01:00
Jozef Izso
837045e72b Add sample files from PHPUnit results in JUnit XML format
Co-Authored-By: Claude Code <noreply@anthropic.com>
2025-12-29 13:58:55 +01:00
Jozef Izso
d1de4d5f06 Support for the PHPUnit dialect of JUnit
Refactor PHPUnit support into separate phpunit-junit parser

Instead of modifying the Java JUnit parser, this creates a dedicated
PHPUnit parser that properly handles PHPUnit's nested testsuite elements.
This keeps the parsers cleanly separated and allows for future PHPUnit-
specific features.

Co-Authored-By: Matteo Beccati <matteo@beccati.com>
Co-Authored-By: Claude Code <noreply@anthropic.com>
2025-12-29 13:58:55 +01:00
Jozef Izso
f24c625f56 Create tests for sample JUnit files
Source: https://github.com/testmoapp/junitxml/
2025-12-27 00:05:09 +01:00
Jozef Izso
ee446707ff Merge pull request #692 from dorny/release/v2.3.0 2025-11-30 01:52:48 +01:00
Jozef Izso
fe45e95373 test-reporter release v2.3.0 2025-11-30 01:49:30 +01:00
Jozef Izso
e40a1da745 Merge pull request #682 from dorny/dependabot/npm_and_yarn/reports/mocha/multi-f14266366f 2025-11-30 01:01:42 +01:00
dependabot[bot]
3445860437 Bump js-yaml and mocha in /reports/mocha
Bumps [js-yaml](https://github.com/nodeca/js-yaml) to 4.1.1 and updates ancestor dependency [mocha](https://github.com/mochajs/mocha). These dependencies need to be updated together.


Updates `js-yaml` from 4.0.0 to 4.1.1
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/4.0.0...4.1.1)

Updates `mocha` from 8.3.0 to 11.7.5
- [Release notes](https://github.com/mochajs/mocha/releases)
- [Changelog](https://github.com/mochajs/mocha/blob/v11.7.5/CHANGELOG.md)
- [Commits](https://github.com/mochajs/mocha/compare/v8.3.0...v11.7.5)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 4.1.1
  dependency-type: indirect
- dependency-name: mocha
  dependency-version: 11.7.5
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-29 23:45:48 +00:00
Jozef Izso
9ef5c136b2 Merge pull request #691 from dorny/fix/complete-documentation 2025-11-30 00:40:18 +01:00
Jozef Izso
83e20c1534 Merge pull request #685 from dorny/dependabot/npm_and_yarn/reports/jest/js-yaml-3.14.2 2025-11-30 00:37:29 +01:00
Jozef Izso
4331a3b620 Clarify the dotnet-nunit docs to require NUnit3TestAdapter for nunit logger 2025-11-23 15:26:03 +01:00
Jozef Izso
04232af26f Complete documentation for all supported reporters
This commit addresses several documentation gaps to ensure all implemented
reporters are properly documented across action.yml and README.md.

Changes:
1. Updated action.yml description to include all supported languages:
   - Added: Go, Python (pytest, unittest), Ruby (RSpec), Swift

2. Added Ruby/RSpec to supported languages list in README.md

3. Added detailed documentation sections in README.md:
   - dotnet-nunit: Added section with NUnit3 XML format instructions
   - rspec-json: Added section with RSpec JSON formatter configuration

All reporters now have:
- Entry in action.yml description
- Entry in README supported languages list
- Entry in README usage documentation (reporter input)
- Detailed documentation section in README "Supported formats"
- Implementation in src/main.ts
- Tests in __tests__/

This ensures users can discover and use all available reporters without
confusion about what is supported.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-22 18:05:33 +01:00
Jozef Izso
cf146f4036 Merge pull request #690 from dorny/fix/add-golang-json-to-action-yml 2025-11-22 17:50:03 +01:00
Jozef Izso
33fc27cf09 Merge pull request #687 from dorny/dependabot/github_actions/actions/checkout-6 2025-11-22 17:49:02 +01:00
Jozef Izso
8fd5fc58ca Add missing golang-json reporter to action.yml
The golang-json reporter has been fully implemented since earlier versions
but was missing from the action.yml documentation. This made it undiscoverable
for users looking for Go test support.

Changes:
- Added golang-json to the list of supported reporters in action.yml

This aligns the action.yml with:
- The actual implementation in src/main.ts (lines 264-265)
- The README.md documentation (line 145)
- The existing parser and tests

Fixes #689

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-22 17:47:11 +01:00
dependabot[bot]
fc80cb4400 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-21 23:07:16 +00:00
dependabot[bot]
79ea6a9d0e Bump js-yaml from 3.14.0 to 3.14.2 in /reports/jest
Bumps [js-yaml](https://github.com/nodeca/js-yaml) from 3.14.0 to 3.14.2.
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/3.14.0...3.14.2)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 3.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-18 19:47:48 +00:00
Jozef Izso
aef3d726a6 Merge pull request #683 from micmarc/feature/python-pytest 2025-11-15 18:19:24 +01:00
Michael Marcus
c1a56edcfe Enhance pytest support
Add robust test schema for pytest report
Update README with sample pytest command
2025-11-15 11:55:41 -05:00
Jozef Izso
3b9dad208e Merge pull request #681 from phactum-mnestler/main
Update sax.js to fix large XML file parsing #681
2025-11-15 11:24:15 +01:00
Jozef Izso
7c636a991c Merge pull request #643 from micmarc/feature/python-support 2025-11-15 11:12:45 +01:00
Michael Nestler
cfce4bda71 Add saxjs to version overrides 2025-11-15 11:07:56 +01:00
Michael Marcus
fe87682515 Improve testing with robust schema for unittest report 2025-11-14 21:59:25 -05:00
Michael Marcus
9b8d3b002e Python support
Add python-xunit-parser.ts with associated case statement
Add python-xunit to reporter docs in action.yml
Add tests
Update README

Resolves #244
Resolves #633
2025-11-14 16:29:58 -05:00
Jozef Izso
e2f0ff6339 Merge pull request #645 from micmarc/fix/report-title-short-summary 2025-11-14 20:00:35 +01:00
Jozef Izso
bc8c29617e test-reporter release v2.2.0
Merge pull request #679 from dorny/release/v2.2.0
2025-11-14 18:46:03 +01:00
Michael Marcus
9aef9d168f Remove info log 2025-11-14 12:01:42 -05:00
Michael Marcus
6b64465c34 Rebuild index.js after rebase from main 2025-11-14 11:59:46 -05:00
Michael Marcus
6617053f9c Fix short summary formatting when a report title is present 2025-11-14 11:58:16 -05:00
Michael Nestler
43a747d94c Update sax.js to fix large XML file parsing 2025-11-14 16:06:35 +01:00
50 changed files with 7590 additions and 2760 deletions

View File

@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Set Node.js
         uses: actions/setup-node@v6

View File

@@ -13,7 +13,7 @@ jobs:
     name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'

View File

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - run: npm ci
      - run: npm run build
      - run: npm test

View File

@@ -11,7 +11,7 @@ jobs:
     name: Workflow test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - uses: ./
         with:
           artifact: test-results

View File

@@ -1,5 +1,12 @@
 # Changelog
+
+## 2.3.0
+* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
+* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
+* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
+* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
+* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682
 ## 2.2.0
 * Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
 * Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672

View File

@@ -19,6 +19,9 @@ This [Github Action](https://github.com/features/actions) displays test results
 - Go / [go test](https://pkg.go.dev/testing)
 - Java / [JUnit](https://junit.org/)
 - JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
+- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
+- PHP / [PHPUnit](https://phpunit.de/)
+- Ruby / [RSpec](https://rspec.info/)
 - Swift / xUnit
 For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +148,10 @@ jobs:
 # java-junit
 # jest-junit
 # mocha-json
+# phpunit-junit
+# python-xunit
 # rspec-json
+# swift-xunit
 reporter: ''
 # Allows you to generate only the summary.
@@ -253,6 +259,20 @@ Supported testing frameworks:
 For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
 </details>
+<details>
+<summary>dotnet-nunit</summary>
+Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
+Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:
+`dotnet test --logger "nunit;LogFileName=test-results.xml"`
+Supported testing frameworks:
+- [NUnit](https://nunit.org/)
+For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
+</details>
 <details>
 <summary>flutter-json</summary>
@@ -296,6 +316,14 @@ This is due to the fact Java stack traces don't contain a full path to the source
 Some heuristic was necessary to figure out the mapping between the line in the stack trace and an actual source file.
 </details>
+<details>
+<summary>phpunit-junit</summary>
+[PHPUnit](https://phpunit.de/) can generate JUnit XML via CLI:
+`phpunit --log-junit reports/phpunit-junit.xml`
+</details>
 <details>
 <summary>jest-junit</summary>
@@ -349,6 +377,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), Mocha
 Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
 </details>
+<details>
+<summary>python-xunit (Experimental)</summary>
+Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.
+For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.
+```shell
+pytest --junit-xml=test-report.xml
+```
+For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
+</details>
+<details>
+<summary>rspec-json</summary>
+[RSpec](https://rspec.info/) testing framework support requires the usage of JSON formatter.
+You can configure RSpec to output JSON format by using the `--format json` option and redirecting to a file:
+```shell
+rspec --format json --out rspec-results.json
+```
+Or configure it in `.rspec` file:
+```
+--format json
+--out rspec-results.json
+```
+For more information see:
+- [RSpec documentation](https://rspec.info/)
+- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
+</details>
 <details>
 <summary>swift-xunit (Experimental)</summary>

View File

@@ -0,0 +1,23 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -0,0 +1,22 @@
![Tests failed](https://img.shields.io/badge/tests-5%20passed%2C%202%20failed%2C%201%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
AssertionError: Expected value did not match.
❌ testCase6
ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```

View File

@@ -0,0 +1,30 @@
![Tests failed](https://img.shields.io/badge/tests-8%20passed%2C%201%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/junit-basic.xml](#user-content-r0)|8 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/junit-basic.xml</a>
**9** tests were completed in **16s** with **8** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Authentication.Login](#user-content-r0s1)|3 ✅|||4s|
|[Tests.Registration](#user-content-r0s2)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Authentication.Login</a>
```
✅ testCase4
✅ testCase5
✅ testCase6
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```

View File

@@ -0,0 +1,88 @@
![Tests failed](https://img.shields.io/badge/tests-28%20passed%2C%202%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/phpcheckstyle-phpunit.xml](#user-content-r0)|28 ✅|2 ❌||41ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/phpcheckstyle-phpunit.xml</a>
**30** tests were completed in **41ms** with **28** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CommentsTest](#user-content-r0s0)|3 ✅|||7ms|
|[DeprecationTest](#user-content-r0s1)|1 ✅|||1ms|
|[GoodTest](#user-content-r0s2)|4 ✅|||5ms|
|[IndentationTest](#user-content-r0s3)|8 ✅|||8ms|
|[MetricsTest](#user-content-r0s4)|1 ✅|||4ms|
|[NamingTest](#user-content-r0s5)|2 ✅|||3ms|
|[OptimizationTest](#user-content-r0s6)|1 ✅|||1ms|
|[OtherTest](#user-content-r0s7)|2 ✅|2 ❌||7ms|
|[PHPTagsTest](#user-content-r0s8)|2 ✅|||1ms|
|[ProhibitedTest](#user-content-r0s9)|1 ✅|||1ms|
|[StrictCompareTest](#user-content-r0s10)|1 ✅|||2ms|
|[UnusedTest](#user-content-r0s11)|2 ✅|||2ms|
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">CommentsTest</a>
```
✅ testGoodDoc
✅ testComments
✅ testTODOs
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">DeprecationTest</a>
```
✅ testDeprecations
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">GoodTest</a>
```
✅ testGood
✅ testDoWhile
✅ testAnonymousFunction
✅ testException
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">IndentationTest</a>
```
✅ testTabIndentation
✅ testSpaceIndentation
✅ testSpaceIndentationArray
✅ testGoodSpaceIndentationArray
✅ testGoodIndentationNewLine
✅ testGoodIndentationSpaces
✅ testBadSpaces
✅ testBadSpaceAfterControl
```
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">MetricsTest</a>
```
✅ testMetrics
```
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">NamingTest</a>
```
✅ testNaming
✅ testFunctionNaming
```
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">OptimizationTest</a>
```
✅ testTextAfterClosingTag
```
### ❌ <a id="user-content-r0s7" href="#user-content-r0s7">OtherTest</a>
```
❌ testOther
PHPUnit\Framework\ExpectationFailedException
❌ testException
PHPUnit\Framework\ExpectationFailedException
✅ testEmpty
✅ testSwitchCaseNeedBreak
```
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">PHPTagsTest</a>
```
✅ testTextAfterClosingTag
✅ testClosingTagNotNeeded
```
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">ProhibitedTest</a>
```
✅ testProhibited
```
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">StrictCompareTest</a>
```
✅ testStrictCompare
```
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">UnusedTest</a>
```
✅ testGoodUnused
✅ testBadUnused
```

View File

@@ -0,0 +1,41 @@
![Tests failed](https://img.shields.io/badge/tests-10%20passed%2C%202%20failed-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/phpunit/phpunit.xml](#user-content-r0)|10 ✅|2 ❌||148ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/phpunit/phpunit.xml</a>
**12** tests were completed in **148ms** with **10** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CLI Arguments](#user-content-r0s0)||2 ❌||140ms|
|[PHPUnit\Event\CollectingDispatcherTest](#user-content-r0s1)|2 ✅|||4ms|
|[PHPUnit\Event\DeferringDispatcherTest](#user-content-r0s2)|4 ✅|||3ms|
|[PHPUnit\Event\DirectDispatcherTest](#user-content-r0s3)|4 ✅|||1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">CLI Arguments</a>
```
❌ targeting-traits-with-coversclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
❌ targeting-traits-with-usesclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">PHPUnit\Event\CollectingDispatcherTest</a>
```
PHPUnit.Event.CollectingDispatcherTest
✅ testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation
✅ testCollectsDispatchedEventsUntilFlushed
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">PHPUnit\Event\DeferringDispatcherTest</a>
```
PHPUnit.Event.DeferringDispatcherTest
✅ testCollectsEventsUntilFlush
✅ testFlushesCollectedEvents
✅ testSubscriberCanBeRegistered
✅ testTracerCanBeRegistered
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">PHPUnit\Event\DirectDispatcherTest</a>
```
PHPUnit.Event.DirectDispatcherTest
✅ testDispatchesEventToKnownSubscribers
✅ testDispatchesEventToTracers
✅ testRegisterRejectsUnknownSubscriber
✅ testDispatchRejectsUnknownEventType
```

View File

@@ -0,0 +1,26 @@
![Tests failed](https://img.shields.io/badge/tests-6%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```

View File

@@ -0,0 +1,23 @@
![Tests failed](https://img.shields.io/badge/tests-4%20passed%2C%202%20failed%2C%202%20skipped-critical)
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
AssertionError: failed
❌ test_error
Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```

View File

@@ -6878,3 +6878,153 @@ at java.lang.Thread.run(Thread.java:748)
 "totalTime": 2126531.0000000005,
 }
 `;
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/java/junit4-complete.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2436,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1534,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 822,
},
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "AssertionError: Expected value did not match.",
"path": undefined,
},
"name": "testCase5",
"result": "failed",
"time": 2902.412,
},
TestCaseResult {
"error": {
"details": undefined,
"line": undefined,
"message": "ArithmeticError: Division by zero.",
"path": undefined,
},
"name": "testCase6",
"result": "failed",
"time": 3819,
},
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2944,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1625.275,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 16082.687,
},
],
"totalTime": 16082.687,
}
`;

View File

@@ -0,0 +1,628 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`phpunit-junit tests report from junit-basic.xml matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/phpunit/junit-basic.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase1",
"result": "success",
"time": 2113.871,
},
TestCaseResult {
"error": undefined,
"name": "testCase2",
"result": "success",
"time": 1051,
},
TestCaseResult {
"error": undefined,
"name": "testCase3",
"result": "success",
"time": 3441,
},
],
},
],
"name": "Tests.Registration",
"totalTime": 6605.870999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase4",
"result": "success",
"time": 2244,
},
TestCaseResult {
"error": undefined,
"name": "testCase5",
"result": "success",
"time": 781,
},
TestCaseResult {
"error": undefined,
"name": "testCase6",
"result": "success",
"time": 1331,
},
],
},
],
"name": "Tests.Authentication.Login",
"totalTime": 4356,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCase7",
"result": "success",
"time": 2508,
},
TestCaseResult {
"error": undefined,
"name": "testCase8",
"result": "success",
"time": 1230.8159999999998,
},
TestCaseResult {
"error": {
"details": "",
"line": undefined,
"message": "AssertionError: Assertion error message",
"path": undefined,
},
"name": "testCase9",
"result": "failed",
"time": 982,
},
],
},
],
"name": "Tests.Authentication",
"totalTime": 9076.816,
},
],
"totalTime": 15682.687,
}
`;
exports[`phpunit-junit tests report from phpcheckstyle-phpunit.xml matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/external/phpunit/phpcheckstyle-phpunit.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGoodDoc",
"result": "success",
"time": 5.093,
},
TestCaseResult {
"error": undefined,
"name": "testComments",
"result": "success",
"time": 0.921,
},
TestCaseResult {
"error": undefined,
"name": "testTODOs",
"result": "success",
"time": 0.6880000000000001,
},
],
},
],
"name": "CommentsTest",
"totalTime": 6.702,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testDeprecations",
"result": "success",
"time": 0.9740000000000001,
},
],
},
],
"name": "DeprecationTest",
"totalTime": 0.9740000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGood",
"result": "success",
"time": 2.6470000000000002,
},
TestCaseResult {
"error": undefined,
"name": "testDoWhile",
"result": "success",
"time": 1.0219999999999998,
},
TestCaseResult {
"error": undefined,
"name": "testAnonymousFunction",
"result": "success",
"time": 0.8,
},
TestCaseResult {
"error": undefined,
"name": "testException",
"result": "success",
"time": 0.888,
},
],
},
],
"name": "GoodTest",
"totalTime": 5.357,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTabIndentation",
"result": "success",
"time": 0.857,
},
TestCaseResult {
"error": undefined,
"name": "testSpaceIndentation",
"result": "success",
"time": 0.929,
},
TestCaseResult {
"error": undefined,
"name": "testSpaceIndentationArray",
"result": "success",
"time": 0.975,
},
TestCaseResult {
"error": undefined,
"name": "testGoodSpaceIndentationArray",
"result": "success",
"time": 1.212,
},
TestCaseResult {
"error": undefined,
"name": "testGoodIndentationNewLine",
"result": "success",
"time": 0.859,
},
TestCaseResult {
"error": undefined,
"name": "testGoodIndentationSpaces",
"result": "success",
"time": 0.78,
},
TestCaseResult {
"error": undefined,
"name": "testBadSpaces",
"result": "success",
"time": 1.1199999999999999,
},
TestCaseResult {
"error": undefined,
"name": "testBadSpaceAfterControl",
"result": "success",
"time": 0.9219999999999999,
},
],
},
],
"name": "IndentationTest",
"totalTime": 7.654,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testMetrics",
"result": "success",
"time": 4.146999999999999,
},
],
},
],
"name": "MetricsTest",
"totalTime": 4.146999999999999,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testNaming",
"result": "success",
"time": 1.426,
},
TestCaseResult {
"error": undefined,
"name": "testFunctionNaming",
"result": "success",
"time": 1.271,
},
],
},
],
"name": "NamingTest",
"totalTime": 2.697,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTextAfterClosingTag",
"result": "success",
"time": 0.9940000000000001,
},
],
},
],
"name": "OptimizationTest",
"totalTime": 0.9940000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": {
"details": "OtherTest::testOther
We expect 20 warnings
Failed asserting that 19 matches expected 20.
/workspace/phpcheckstyle/test/OtherTest.php:24",
"line": 12,
"message": "PHPUnit\\Framework\\ExpectationFailedException",
"path": undefined,
},
"name": "testOther",
"result": "failed",
"time": 5.2509999999999994,
},
TestCaseResult {
"error": {
"details": "OtherTest::testException
We expect 1 error
Failed asserting that 0 matches expected 1.
/workspace/phpcheckstyle/test/OtherTest.php:40",
"line": 31,
"message": "PHPUnit\\Framework\\ExpectationFailedException",
"path": undefined,
},
"name": "testException",
"result": "failed",
"time": 0.751,
},
TestCaseResult {
"error": undefined,
"name": "testEmpty",
"result": "success",
"time": 0.42700000000000005,
},
TestCaseResult {
"error": undefined,
"name": "testSwitchCaseNeedBreak",
"result": "success",
"time": 0.901,
},
],
},
],
"name": "OtherTest",
"totalTime": 7.329,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testTextAfterClosingTag",
"result": "success",
"time": 0.641,
},
TestCaseResult {
"error": undefined,
"name": "testClosingTagNotNeeded",
"result": "success",
"time": 0.631,
},
],
},
],
"name": "PHPTagsTest",
"totalTime": 1.272,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testProhibited",
"result": "success",
"time": 0.9380000000000001,
},
],
},
],
"name": "ProhibitedTest",
"totalTime": 0.9380000000000001,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testStrictCompare",
"result": "success",
"time": 1.578,
},
],
},
],
"name": "StrictCompareTest",
"totalTime": 1.578,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testGoodUnused",
"result": "success",
"time": 0.94,
},
TestCaseResult {
"error": undefined,
"name": "testBadUnused",
"result": "success",
"time": 0.895,
},
],
},
],
"name": "UnusedTest",
"totalTime": 1.835,
},
],
"totalTime": undefined,
}
`;
exports[`phpunit-junit tests report from phpunit test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/phpunit/phpunit.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.CollectingDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation",
"result": "success",
"time": 1.441,
},
TestCaseResult {
"error": undefined,
"name": "testCollectsDispatchedEventsUntilFlushed",
"result": "success",
"time": 2.815,
},
],
},
],
"name": "PHPUnit\\Event\\CollectingDispatcherTest",
"totalTime": 4.256,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.DeferringDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testCollectsEventsUntilFlush",
"result": "success",
"time": 1.6720000000000002,
},
TestCaseResult {
"error": undefined,
"name": "testFlushesCollectedEvents",
"result": "success",
"time": 0.661,
},
TestCaseResult {
"error": undefined,
"name": "testSubscriberCanBeRegistered",
"result": "success",
"time": 0.33399999999999996,
},
TestCaseResult {
"error": undefined,
"name": "testTracerCanBeRegistered",
"result": "success",
"time": 0.262,
},
],
},
],
"name": "PHPUnit\\Event\\DeferringDispatcherTest",
"totalTime": 2.928,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "PHPUnit.Event.DirectDispatcherTest",
"tests": [
TestCaseResult {
"error": undefined,
"name": "testDispatchesEventToKnownSubscribers",
"result": "success",
"time": 0.17,
},
TestCaseResult {
"error": undefined,
"name": "testDispatchesEventToTracers",
"result": "success",
"time": 0.248,
},
TestCaseResult {
"error": undefined,
"name": "testRegisterRejectsUnknownSubscriber",
"result": "success",
"time": 0.257,
},
TestCaseResult {
"error": undefined,
"name": "testDispatchRejectsUnknownEventType",
"result": "success",
"time": 0.11900000000000001,
},
],
},
],
"name": "PHPUnit\\Event\\DirectDispatcherTest",
"totalTime": 0.794,
},
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "",
"tests": [
TestCaseResult {
"error": {
"details": "targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
"line": undefined,
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
"path": undefined,
},
"name": "targeting-traits-with-coversclass-attribute-is-deprecated.phpt",
"result": "failed",
"time": 68.151,
},
TestCaseResult {
"error": {
"details": "targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
"line": undefined,
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
"path": undefined,
},
"name": "targeting-traits-with-usesclass-attribute-is-deprecated.phpt",
"result": "failed",
"time": 64.268,
},
],
},
],
"name": "CLI Arguments",
"totalTime": 140.397,
},
],
"totalTime": undefined,
}
`;

View File

@@ -0,0 +1,192 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-pytest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "tests.test_lib",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 2,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 5,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param1]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized[param2]",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_always_fail():
> assert False
E assert False
tests/test_lib.py:25: AssertionError
",
"line": undefined,
"message": "assert False",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": {
"details": "def test_error():
> raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_record_property",
"result": "success",
"time": 0,
},
],
},
TestGroupResult {
"name": "custom_classname",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_with_record_xml_attribute",
"result": "success",
"time": 0,
},
],
},
],
"name": "pytest",
"totalTime": 19,
},
],
"totalTime": undefined,
}
`;
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/python-xunit-unittest.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "TestAcme",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test_always_pass",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_0_param1",
"result": "success",
"time": 1,
},
TestCaseResult {
"error": undefined,
"name": "test_parameterized_1_param2",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_with_subtests",
"result": "success",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
",
"line": undefined,
"message": "AssertionError: failed",
"path": undefined,
},
"name": "test_always_fail",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
",
"line": undefined,
"message": "Exception: error",
"path": undefined,
},
"name": "test_error",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_always_skip",
"result": "skipped",
"time": 0,
},
TestCaseResult {
"error": undefined,
"name": "test_expected_failure",
"result": "skipped",
"time": 0,
},
],
},
],
"name": "TestAcme-20251114214921",
"totalTime": 1,
},
],
"totalTime": 1,
}
`;

View File

@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites/>

View File

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the basic structure.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
        <!-- Java JUnit4 XML files do not nest <testsuite> elements -->
<!--
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
-->
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,141 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a JUnit-style XML example with commonly used tags and attributes.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
the <testsuites> element if there is only a single top-level <testsuite> element (which
is then used as the root element).
name Name of the entire test run
tests Total number of tests in this file
failures Total number of failed tests in this file
errors Total number of errored tests in this file
skipped Total number of skipped tests in this file
assertions Total number of assertions for all tests in this file
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test run was executed (in ISO 8601 format)
-->
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
There can be many test suites in an XML file, and there can be test suites under other
test suites.
name Name of the test suite (e.g. class name or folder name)
tests Total number of tests in this suite
failures Total number of failed tests in this suite
errors Total number of errored tests in this suite
skipped Total number of skipped tests in this suite
assertions Total number of assertions for all tests in this suite
time Aggregated time of all tests in this file in seconds
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
file Source code file of this test suite
-->
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
file="tests/registration.code">
<!-- <properties> Test suites (and test cases, see below) can have additional
properties such as environment variables or version numbers. -->
<properties>
<!-- <property> Each property has a name and value. Some tools also support
properties with text values instead of value attributes. -->
<property name="version" value="1.774" />
<property name="commit" value="ef7bebf" />
<property name="browser" value="Google Chrome" />
<property name="ci" value="https://github.com/actions/runs/1234" />
<property name="config">
Config line #1
Config line #2
Config line #3
</property>
</properties>
<!-- <system-out> Optionally data written to standard out for the suite.
Also supported on a test case level, see below. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optionally data written to standard error for the suite.
Also supported on a test case level, see below. -->
<system-err>Data written to standard error.</system-err>
<!-- <testcase> There are one or more test cases in a test suite. A test passed
if there isn't an additional result element (skipped, failure, error).
name The name of this test case, often the method name
classname The name of the parent class/folder, often the same as the suite's name
assertions Number of assertions checked during test case execution
time Execution time of the test in seconds
file Source code file of this test case
line Source code line number of the start of this test case
-->
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
time="2.436" file="tests/registration.code" line="24" />
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
time="1.534" file="tests/registration.code" line="62" />
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
time="0.822" file="tests/registration.code" line="102" />
<!-- Example of a test case that was skipped -->
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
time="0" file="tests/registration.code" line="164">
<!-- <skipped> Indicates that the test was not executed. Can have an optional
message describing why the test was skipped. -->
<skipped message="Test was skipped." />
</testcase>
<!-- Example of a test case that failed. -->
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
time="2.902412" file="tests/registration.code" line="202">
<!-- <failure> The test failed because one of the assertions/checks failed.
Can have a message and failure type, often the assertion type or class. The text
content of the element often includes the failure description or stack trace. -->
<failure message="Expected value did not match." type="AssertionError">
<!-- Failure description or stack trace -->
</failure>
</testcase>
<!-- Example of a test case that had errors. -->
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
time="3.819" file="tests/registration.code" line="235">
<!-- <error> The test had an unexpected error during execution. Can have a
message and error type, often the exception type or class. The text
content of the element often includes the error description or stack trace. -->
<error message="Division by zero." type="ArithmeticError">
<!-- Error description or stack trace -->
</error>
</testcase>
<!-- Example of a test case with outputs. -->
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
time="2.944" file="tests/registration.code" line="287">
<!-- <system-out> Optional data written to standard out for the test case. -->
<system-out>Data written to standard out.</system-out>
<!-- <system-err> Optional data written to standard error for the test case. -->
<system-err>Data written to standard error.</system-err>
</testcase>
<!-- Example of a test case with properties -->
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
time="1.625275" file="tests/registration.code" line="302">
<!-- <properties> Some tools also support properties for test cases. -->
<properties>
<property name="priority" value="high" />
<property name="language" value="english" />
<property name="author" value="Adrian" />
<property name="attachment" value="screenshots/dashboard.png" />
<property name="attachment" value="screenshots/users.png" />
<property name="description">
This text describes the purpose of this test case and provides
an overview of what the test does and how it works.
</property>
</properties>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the basic structure.
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,212 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="/workspace/phpcheckstyle/phpunit.xml" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
<testsuite name="PHPUnitTestSuite" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
<testsuite name="CommentsTest" file="/workspace/phpcheckstyle/test/CommentsTest.php" tests="3" assertions="12" errors="0" failures="0" skipped="0" time="0.006702">
<testcase name="testGoodDoc" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="12" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.005093"/>
<testcase name="testComments" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="30" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000921">
<system-out>File "./test/sample/bad_comments.php" warning, line 4 - Avoid Shell/Perl like comments.
File "./test/sample/bad_comments.php" warning, line 6 - The class Comments must have a docblock comment.
File "./test/sample/bad_comments.php" warning, line 10 - The function testComment must have a docblock comment.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment returns a value and must include @returns in its docblock.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment parameters must match those in its docblock @param.
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment throws an exception and must include @throws in its docblock.
</system-out>
</testcase>
<testcase name="testTODOs" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="48" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000688">
<system-out>File "./test/sample/todo.php" warning, line 3 - TODO: The todo message.
</system-out>
</testcase>
</testsuite>
<testsuite name="DeprecationTest" file="/workspace/phpcheckstyle/test/DeprecationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000974">
<testcase name="testDeprecations" file="/workspace/phpcheckstyle/test/DeprecationTest.php" line="12" class="DeprecationTest" classname="DeprecationTest" assertions="4" time="0.000974">
<system-out>File "./test/sample/bad_deprecation.php" warning, line 17 - split is deprecated since PHP 5.3. explode($pattern, $string) or preg_split('@'.$pattern.'@', $string) must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 19 - ereg is deprecated since PHP 5.3. preg_match('@'.$pattern.'@', $string) must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 21 - session_register is deprecated since PHP 5.3. $_SESSION must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 23 - mysql_db_query is deprecated since PHP 5.3. mysql_select_db and mysql_query must be used instead.
File "./test/sample/bad_deprecation.php" warning, line 25 - $HTTP_GET_VARS is deprecated since PHP 5.3. $_GET must be used instead.
</system-out>
</testcase>
</testsuite>
<testsuite name="GoodTest" file="/workspace/phpcheckstyle/test/GoodTest.php" tests="4" assertions="16" errors="0" failures="0" skipped="0" time="0.005357">
<testcase name="testGood" file="/workspace/phpcheckstyle/test/GoodTest.php" line="12" class="GoodTest" classname="GoodTest" assertions="4" time="0.002647"/>
<testcase name="testDoWhile" file="/workspace/phpcheckstyle/test/GoodTest.php" line="32" class="GoodTest" classname="GoodTest" assertions="4" time="0.001022"/>
<testcase name="testAnonymousFunction" file="/workspace/phpcheckstyle/test/GoodTest.php" line="50" class="GoodTest" classname="GoodTest" assertions="4" time="0.000800"/>
<testcase name="testException" file="/workspace/phpcheckstyle/test/GoodTest.php" line="68" class="GoodTest" classname="GoodTest" assertions="4" time="0.000888"/>
</testsuite>
<testsuite name="IndentationTest" file="/workspace/phpcheckstyle/test/IndentationTest.php" tests="8" assertions="32" errors="0" failures="0" skipped="0" time="0.007654">
<testcase name="testTabIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="12" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000857">
<system-out>File "./test/sample/bad_indentation.php" warning, line 8 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 15 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 17 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 18 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 19 - Whitespace indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 20 - Whitespace indentation must not be used.
</system-out>
</testcase>
<testcase name="testSpaceIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="30" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000929">
<system-out>File "./test/sample/bad_indentation.php" warning, line 10 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 10 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation.php" warning, line 13 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 13 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation.php" warning, line 15 - The indentation level must be 8 but was 4.
File "./test/sample/bad_indentation.php" warning, line 16 - Tab indentation must not be used.
File "./test/sample/bad_indentation.php" warning, line 16 - The indentation level must be 8 but was 1.
File "./test/sample/bad_indentation.php" warning, line 17 - The indentation level must be 8 but was 3.
File "./test/sample/bad_indentation.php" warning, line 18 - The indentation level must be 8 but was 5.
File "./test/sample/bad_indentation.php" warning, line 19 - The indentation level must be 8 but was 6.
File "./test/sample/bad_indentation.php" warning, line 20 - The indentation level must be 4 but was 1.
</system-out>
</testcase>
<testcase name="testSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="51" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000975">
<system-out>File "./test/sample/bad_indentation_array.php" warning, line 10 - Tab indentation must not be used.
File "./test/sample/bad_indentation_array.php" warning, line 10 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation_array.php" warning, line 13 - Tab indentation must not be used.
File "./test/sample/bad_indentation_array.php" warning, line 13 - The indentation level must be 4 but was 1.
File "./test/sample/bad_indentation_array.php" warning, line 16 - The indentation level must be 12 but was 8.
File "./test/sample/bad_indentation_array.php" warning, line 24 - The indentation level must be 12 but was 8.
File "./test/sample/bad_indentation_array.php" warning, line 29 - The indentation level must be 8 but was 12.
File "./test/sample/bad_indentation_array.php" warning, line 15 - Undeclared or unused variable: $aVar.
File "./test/sample/bad_indentation_array.php" warning, line 19 - Undeclared or unused variable: $bVar.
File "./test/sample/bad_indentation_array.php" warning, line 23 - Undeclared or unused variable: $cVar.
File "./test/sample/bad_indentation_array.php" warning, line 27 - Undeclared or unused variable: $dVar.
</system-out>
</testcase>
<testcase name="testGoodSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="72" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001212"/>
<testcase name="testGoodIndentationNewLine" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="93" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000859"/>
<testcase name="testGoodIndentationSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="116" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000780"/>
<testcase name="testBadSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="137" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001120">
<system-out>File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must follow ,.
File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must precede {.
File "./test/sample/bad_spaces.php" warning, line 19 - Whitespace must follow if.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede =.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow =.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede +.
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow +.
File "./test/sample/bad_spaces.php" info, line 25 - Whitespace must not precede ,.
File "./test/sample/bad_spaces.php" info, line 26 - Whitespace must not follow !.
</system-out>
</testcase>
<testcase name="testBadSpaceAfterControl" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="155" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000922">
<system-out>File "./test/sample/bad_space_after_control.php" warning, line 19 - Whitespace must not follow if.
</system-out>
</testcase>
</testsuite>
<testsuite name="MetricsTest" file="/workspace/phpcheckstyle/test/MetricsTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.004147">
<testcase name="testMetrics" file="/workspace/phpcheckstyle/test/MetricsTest.php" line="12" class="MetricsTest" classname="MetricsTest" assertions="4" time="0.004147">
<system-out>File "./test/sample/bad_metrics.php" warning, line 21 - The function testMetrics's number of parameters (6) must not exceed 4.
File "./test/sample/bad_metrics.php" info, line 55 - Line is too long. [233/160]
File "./test/sample/bad_metrics.php" warning, line 21 - The Cyclomatic Complexity of function testMetrics is too high. [15/10]
File "./test/sample/bad_metrics.php" warning, line 244 - The testMetrics function body length is too long. [223/200]
</system-out>
</testcase>
</testsuite>
<testsuite name="NamingTest" file="/workspace/phpcheckstyle/test/NamingTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.002697">
<testcase name="testNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="12" class="NamingTest" classname="NamingTest" assertions="4" time="0.001426">
<system-out>File "./test/sample/_bad_naming.php" error, line 11 - Constant _badly_named_constant name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" error, line 13 - Constant bad_CONST name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" warning, line 17 - Top level variable $XXX name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 20 - Variable x name length is too short.
File "./test/sample/_bad_naming.php" error, line 28 - Class badlynamedclass name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" warning, line 32 - Member variable $YYY name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 37 - The constructor name must be __construct().
File "./test/sample/_bad_naming.php" error, line 44 - Function Badlynamedfunction name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" warning, line 47 - Local variable $ZZZ name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 54 - Protected function Badlynamedfunction2 name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 61 - Private function badlynamedfunction3 name should follow the pattern /^_[a-z][a-zA-Z0-9]*$/.
File "./test/sample/_bad_naming.php" error, line 70 - Interface _badlynamedinterface name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
File "./test/sample/_bad_naming.php" error, line 75 - File _bad_naming.php name should follow the pattern /^[a-zA-Z][a-zA-Z0-9._]*$/.
</system-out>
</testcase>
<testcase name="testFunctionNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="32" class="NamingTest" classname="NamingTest" assertions="4" time="0.001271"/>
</testsuite>
<testsuite name="OptimizationTest" file="/workspace/phpcheckstyle/test/OptimizationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000994">
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/OptimizationTest.php" line="12" class="OptimizationTest" classname="OptimizationTest" assertions="4" time="0.000994">
<system-out>File "./test/sample/bad_optimisation.php" warning, line 18 - count function must not be used inside a loop.
File "./test/sample/bad_optimisation.php" warning, line 23 - count function must not be used inside a loop.
</system-out>
</testcase>
</testsuite>
<testsuite name="OtherTest" file="/workspace/phpcheckstyle/test/OtherTest.php" tests="4" assertions="13" errors="0" failures="2" skipped="0" time="0.007329">
<testcase name="testOther" file="/workspace/phpcheckstyle/test/OtherTest.php" line="12" class="OtherTest" classname="OtherTest" assertions="4" time="0.005251">
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testOther
We expect 20 warnings
Failed asserting that 19 matches expected 20.
/workspace/phpcheckstyle/test/OtherTest.php:24</failure>
<system-out>File "./test/sample/bad_other.php" warning, line 17 - All arguments with default values must be at the end of the block or statement.
File "./test/sample/bad_other.php" warning, line 21 - Errors must not be silenced when calling a function.
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
File "./test/sample/bad_other.php" warning, line 37 - TODO: Show todos
File "./test/sample/bad_other.php" warning, line 40 - Avoid empty statements (;;).
File "./test/sample/bad_other.php" warning, line 42 - Boolean operators (&amp;&amp;) must be used instead of logical operators (AND).
File "./test/sample/bad_other.php" warning, line 42 - Empty if block.
File "./test/sample/bad_other.php" warning, line 48 - Heredoc syntax must not be used.
File "./test/sample/bad_other.php" warning, line 52 - The statement if must contain its code within a {} block.
File "./test/sample/bad_other.php" warning, line 54 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_other.php" warning, line 54 - The statement while must contain its code within a {} block.
File "./test/sample/bad_other.php" warning, line 66 - The switch statement must have a default case.
File "./test/sample/bad_other.php" warning, line 79 - The default case of a switch statement must be located after all other cases.
File "./test/sample/bad_other.php" warning, line 93 - Unary operators (++ or --) must not be used inside a control statement
File "./test/sample/bad_other.php" warning, line 95 - Assigments (=) must not be used inside a control statement.
File "./test/sample/bad_other.php" warning, line 106 - File ./test/sample/bad_other.php must not have multiple class declarations.
</system-out>
</testcase>
<testcase name="testException" file="/workspace/phpcheckstyle/test/OtherTest.php" line="31" class="OtherTest" classname="OtherTest" assertions="1" time="0.000751">
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testException
We expect 1 error
Failed asserting that 0 matches expected 1.
/workspace/phpcheckstyle/test/OtherTest.php:40</failure>
</testcase>
<testcase name="testEmpty" file="/workspace/phpcheckstyle/test/OtherTest.php" line="50" class="OtherTest" classname="OtherTest" assertions="4" time="0.000427">
<system-out>File "./test/sample/empty.php" warning, line 1 - The file ./test/sample/empty.php is empty.
</system-out>
</testcase>
<testcase name="testSwitchCaseNeedBreak" file="/workspace/phpcheckstyle/test/OtherTest.php" line="69" class="OtherTest" classname="OtherTest" assertions="4" time="0.000901">
<system-out>File "./test/sample/switch_multi_case.php" warning, line 10 - The case statement must contain a break.
</system-out>
</testcase>
</testsuite>
<testsuite name="PHPTagsTest" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001272">
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="12" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000641">
<system-out>File "./test/sample/bad_php_tags_text_after_end.php" warning, line 9 - A PHP close tag must not be included at the end of the file.
</system-out>
</testcase>
<testcase name="testClosingTagNotNeeded" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="30" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000631">
<system-out>File "./test/sample/bad_php_tags_end_not_needed.php" warning, line 1 - PHP tag should be at the beginning of the line.
</system-out>
</testcase>
</testsuite>
<testsuite name="ProhibitedTest" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000938">
<testcase name="testProhibited" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" line="13" class="ProhibitedTest" classname="ProhibitedTest" assertions="4" time="0.000938">
<system-out>File "./test/sample/bad_prohibited.php" warning, line 18 - The function exec must not be called.
File "./test/sample/bad_prohibited.php" warning, line 20 - Token T_PRINT must not be used.
</system-out>
</testcase>
</testsuite>
<testsuite name="StrictCompareTest" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.001578">
<testcase name="testStrictCompare" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" line="12" class="StrictCompareTest" classname="StrictCompareTest" assertions="4" time="0.001578">
<system-out>File "./test/sample/bad_strictcompare.php" warning, line 14 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_strictcompare.php" warning, line 19 - Consider using a strict comparison operator instead of !=.
File "./test/sample/bad_strictcompare.php" warning, line 24 - Consider using a strict comparison operator instead of ==.
File "./test/sample/bad_strictcompare.php" warning, line 29 - Consider using a strict comparison operator instead of ==.
</system-out>
</testcase>
</testsuite>
<testsuite name="UnusedTest" file="/workspace/phpcheckstyle/test/UnusedTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001835">
<testcase name="testGoodUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="13" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000940"/>
<testcase name="testBadUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="32" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000895">
<system-out>File "./test/sample/bad_unused.php" warning, line 23 - Function _testUnused has unused code after RETURN.
File "./test/sample/bad_unused.php" warning, line 27 - The function _testUnused parameter $b is not used.
File "./test/sample/bad_unused.php" warning, line 18 - Unused private function: _testUnused.
File "./test/sample/bad_unused.php" warning, line 20 - Undeclared or unused variable: $c.
</system-out>
</testcase>
</testsuite>
</testsuite>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="SampleSuite" tests="6" failures="6" time="0.006">
<testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.001">
<failure type="Exception" message="Boom">/home/runner/work/repo/src/Fake.php:42</failure>
</testcase>
<testcase name="testStringFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Other.php" line="10" time="0.001">
<failure>/home/runner/work/repo/src/Other.php:10</failure>
</testcase>
<testcase name="testParenFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Paren.php" line="123" time="0.001">
<failure>at /home/runner/work/repo/src/Paren.php(123)</failure>
</testcase>
<testcase name="testWindowsFailure" classname="SampleSuite" file="C:\repo\src\Win.php" line="77" time="0.001">
<failure>C:\repo\src\Win.php:77</failure>
</testcase>
<testcase name="testWindowsParenFailure" classname="SampleSuite" file="C:\repo\src\WinParen.php" line="88" time="0.001">
<failure>at C:\repo\src\WinParen.php(88)</failure>
</testcase>
<testcase name="testPhptFailure" classname="SampleSuite" file="/home/runner/work/repo/tests/Sample.phpt" line="12" time="0.001">
<failure>/home/runner/work/repo/tests/Sample.phpt:12</failure>
</testcase>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,79 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="CLI Arguments" tests="12" assertions="12" errors="0" failures="2" skipped="0" time="0.140397">
<testcase name="targeting-traits-with-coversclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt" assertions="1" time="0.068151">
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
</testcase>
<testcase name="targeting-traits-with-usesclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt" assertions="1" time="0.064268">
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
--- Expected
+++ Actual
@@ @@
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
Test Runner Configured
Test Suite Loaded (1 test)
+Test Runner Triggered Warning (No code coverage driver available)
Event Facade Sealed
Test Runner Started
Test Suite Sorted
@@ @@
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest, 1 test)
Test Runner Execution Finished
Test Runner Finished
-PHPUnit Finished (Shell Exit Code: 0)
+PHPUnit Finished (Shell Exit Code: 1)
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
</testcase>
<testsuite name="PHPUnit\Event\CollectingDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" tests="2" assertions="2" errors="0" failures="0" skipped="0" time="0.004256">
<testcase name="testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="20" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.001441"/>
<testcase name="testCollectsDispatchedEventsUntilFlushed" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="27" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.002815"/>
</testsuite>
<testsuite name="PHPUnit\Event\DeferringDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.002928">
<testcase name="testCollectsEventsUntilFlush" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="22" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.001672"/>
<testcase name="testFlushesCollectedEvents" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="35" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000661"/>
<testcase name="testSubscriberCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="53" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000334"/>
<testcase name="testTracerCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="69" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000262"/>
</testsuite>
<testsuite name="PHPUnit\Event\DirectDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.000794">
<testcase name="testDispatchesEventToKnownSubscribers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="24" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000170"/>
<testcase name="testDispatchesEventToTracers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="43" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000248"/>
<testcase name="testRegisterRejectsUnknownSubscriber" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="62" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000257"/>
<testcase name="testDispatchRejectsUnknownEventType" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="73" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000119"/>
</testsuite>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
&gt; assert False
E assert False
tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
&gt; raise Exception("error")
E Exception: error
tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>

View File

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
<testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
<testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
<testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
<failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
]]></failure>
</testcase>
<testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
<error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
]]></error>
</testcase>
<testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
<skipped type="skip" message="skipped"/>
</testcase>
<testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
<skipped type="XFAIL" message="expected failure: (&lt;class 'AssertionError'&gt;, AssertionError('expected failure'), &lt;traceback object at 0x100c125c0&gt;)"/>
</testcase>
</testsuite>

View File

@@ -73,6 +73,46 @@ describe('java-junit tests', () => {
fs.writeFileSync(outputPath, report)
})
it('report from testmo/junitxml basic example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report from testmo/junitxml complete example matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JavaJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('parses empty failures in test results', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))

View File

@@ -303,4 +303,47 @@ describe('jest-junit tests', () => {
expect(report).not.toContain('<details><summary>Expand for details</summary>')
expect(report).not.toContain('</details>')
})
it('report includes the short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
// Report should have the title as the first line
expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
})
it('report includes a custom report title and short summary', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new JestJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const shortSummary = '1 passed, 4 failed and 1 skipped'
const report = getReport(
[result],
{
...DEFAULT_OPTIONS,
reportTitle: 'My Custom Title'
},
shortSummary
)
// Report should have the title as the first line
expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
})
})

View File

@@ -0,0 +1,347 @@
import * as fs from 'fs'
import * as path from 'path'
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'
describe('phpunit-junit tests', () => {
it('produces empty test run result when there are no test cases', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result.tests).toBe(0)
expect(result.result).toBe('success')
})
it('report from phpunit test results matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-test-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('parses nested test suites correctly', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Should have 4 test suites (3 nested ones plus the parent with direct testcases)
expect(result.suites.length).toBe(4)
// Verify suite names
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('PHPUnit\\Event\\CollectingDispatcherTest')
expect(suiteNames).toContain('PHPUnit\\Event\\DeferringDispatcherTest')
expect(suiteNames).toContain('PHPUnit\\Event\\DirectDispatcherTest')
expect(suiteNames).toContain('CLI Arguments')
// Verify total test count
expect(result.tests).toBe(12)
expect(result.passed).toBe(10)
expect(result.failed).toBe(2)
})
it('extracts error details from failures', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Find the CLI Arguments suite which has failures
const cliSuite = result.suites.find(s => s.name === 'CLI Arguments')
expect(cliSuite).toBeDefined()
// Get the failed tests
const failedTests = cliSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
expect(failedTests.length).toBe(2)
// Verify error details are captured
for (const test of failedTests) {
expect(test.error).toBeDefined()
expect(test.error!.details).toContain('Failed asserting that string matches format description')
}
})
it('maps absolute paths to tracked files for annotations', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit-paths.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: [
'src/Fake.php',
'src/Other.php',
'src/Paren.php',
'src/Win.php',
'src/WinParen.php',
'tests/Sample.phpt'
]
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
const suite = result.suites.find(s => s.name === 'SampleSuite')
expect(suite).toBeDefined()
const tests = suite!.groups.flatMap(g => g.tests)
const fileFailure = tests.find(t => t.name === 'testFailure')
expect(fileFailure).toBeDefined()
expect(fileFailure!.error).toBeDefined()
expect(fileFailure!.error!.path).toBe('src/Fake.php')
expect(fileFailure!.error!.line).toBe(42)
const stringFailure = tests.find(t => t.name === 'testStringFailure')
expect(stringFailure).toBeDefined()
expect(stringFailure!.error).toBeDefined()
expect(stringFailure!.error!.path).toBe('src/Other.php')
expect(stringFailure!.error!.line).toBe(10)
const parenFailure = tests.find(t => t.name === 'testParenFailure')
expect(parenFailure).toBeDefined()
expect(parenFailure!.error).toBeDefined()
expect(parenFailure!.error!.path).toBe('src/Paren.php')
expect(parenFailure!.error!.line).toBe(123)
const windowsFailure = tests.find(t => t.name === 'testWindowsFailure')
expect(windowsFailure).toBeDefined()
expect(windowsFailure!.error).toBeDefined()
expect(windowsFailure!.error!.path).toBe('src/Win.php')
expect(windowsFailure!.error!.line).toBe(77)
const windowsParenFailure = tests.find(t => t.name === 'testWindowsParenFailure')
expect(windowsParenFailure).toBeDefined()
expect(windowsParenFailure!.error).toBeDefined()
expect(windowsParenFailure!.error!.path).toBe('src/WinParen.php')
expect(windowsParenFailure!.error!.line).toBe(88)
const phptFailure = tests.find(t => t.name === 'testPhptFailure')
expect(phptFailure).toBeDefined()
expect(phptFailure!.error).toBeDefined()
expect(phptFailure!.error!.path).toBe('tests/Sample.phpt')
expect(phptFailure!.error!.line).toBe(12)
})
it('parses junit-basic.xml with nested suites and failure', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify test counts
expect(result.tests).toBe(9)
expect(result.passed).toBe(8)
expect(result.failed).toBe(1)
expect(result.result).toBe('failed')
// Verify suites - should have Tests.Registration, Tests.Authentication.Login, and Tests.Authentication
expect(result.suites.length).toBe(3)
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('Tests.Registration')
expect(suiteNames).toContain('Tests.Authentication.Login')
expect(suiteNames).toContain('Tests.Authentication')
// Verify the Registration suite has 3 tests
const registrationSuite = result.suites.find(s => s.name === 'Tests.Registration')
expect(registrationSuite).toBeDefined()
const registrationTests = registrationSuite!.groups.flatMap(g => g.tests)
expect(registrationTests.length).toBe(3)
// Verify the Authentication suite has 3 direct tests (not counting nested suite)
const authSuite = result.suites.find(s => s.name === 'Tests.Authentication')
expect(authSuite).toBeDefined()
const authTests = authSuite!.groups.flatMap(g => g.tests)
expect(authTests.length).toBe(3)
// Verify the Login nested suite has 3 tests
const loginSuite = result.suites.find(s => s.name === 'Tests.Authentication.Login')
expect(loginSuite).toBeDefined()
const loginTests = loginSuite!.groups.flatMap(g => g.tests)
expect(loginTests.length).toBe(3)
// Verify failure is captured
const failedTest = authTests.find(t => t.name === 'testCase9')
expect(failedTest).toBeDefined()
expect(failedTest!.result).toBe('failed')
expect(failedTest!.error).toBeDefined()
expect(failedTest!.error!.message).toBe('AssertionError: Assertion error message')
})
it('parses phpcheckstyle-phpunit.xml with deeply nested suites', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
// Verify test counts from the XML: tests="30", failures="2"
expect(result.tests).toBe(30)
expect(result.passed).toBe(28)
expect(result.failed).toBe(2)
expect(result.result).toBe('failed')
// Verify the number of test suites extracted (leaf suites with testcases)
// CommentsTest, DeprecationTest, GoodTest, IndentationTest, MetricsTest,
// NamingTest, OptimizationTest, OtherTest, PHPTagsTest, ProhibitedTest,
// StrictCompareTest, UnusedTest = 12 suites
expect(result.suites.length).toBe(12)
const suiteNames = result.suites.map(s => s.name)
expect(suiteNames).toContain('CommentsTest')
expect(suiteNames).toContain('GoodTest')
expect(suiteNames).toContain('IndentationTest')
expect(suiteNames).toContain('OtherTest')
})
it('extracts test data from phpcheckstyle-phpunit.xml', async () => {
  const xmlFile = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
  const relativePath = normalizeFilePath(path.relative(__dirname, xmlFile))
  const xml = fs.readFileSync(xmlFile, {encoding: 'utf8'})
  const options: ParseOptions = {
    parseErrors: true,
    trackedFiles: []
  }
  const run = await new PhpunitJunitParser(options).parse(relativePath, xml)

  // The CommentsTest suite should be present with exactly 3 test cases
  const commentsSuite = run.suites.find(s => s.name === 'CommentsTest')
  expect(commentsSuite).toBeDefined()
  const cases = commentsSuite!.groups.flatMap(g => g.tests)
  expect(cases.length).toBe(3)

  // A passing case is reported as 'success'
  const goodDoc = cases.find(t => t.name === 'testGoodDoc')
  expect(goodDoc).toBeDefined()
  expect(goodDoc!.result).toBe('success')
})
it('captures failure details from phpcheckstyle-phpunit.xml', async () => {
  const xmlFile = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
  const relativePath = normalizeFilePath(path.relative(__dirname, xmlFile))
  const xml = fs.readFileSync(xmlFile, {encoding: 'utf8'})
  const options: ParseOptions = {
    parseErrors: true,
    trackedFiles: []
  }
  const run = await new PhpunitJunitParser(options).parse(relativePath, xml)

  // OtherTest is the suite holding the two expected failures
  const otherSuite = run.suites.find(s => s.name === 'OtherTest')
  expect(otherSuite).toBeDefined()
  const failures = otherSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
  expect(failures.length).toBe(2)

  // Each failing case keeps the original assertion text in its error details
  const testOther = failures.find(t => t.name === 'testOther')
  expect(testOther).toBeDefined()
  expect(testOther!.error).toBeDefined()
  expect(testOther!.error!.details).toContain('We expect 20 warnings')
  expect(testOther!.error!.details).toContain('Failed asserting that 19 matches expected 20')

  const testException = failures.find(t => t.name === 'testException')
  expect(testException).toBeDefined()
  expect(testException!.error).toBeDefined()
  expect(testException!.error!.details).toContain('We expect 1 error')
})
it('report from junit-basic.xml matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-junit-basic-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
it('report from phpcheckstyle-phpunit.xml matches snapshot', async () => {
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-phpcheckstyle-results.md')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const opts: ParseOptions = {
parseErrors: true,
trackedFiles: []
}
const parser = new PhpunitJunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

View File

@@ -0,0 +1,93 @@
import * as fs from 'fs'
import * as path from 'path'
import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'
// Default parser options shared by the tests in this file:
// error details are parsed and no repository files are tracked.
const defaultOpts: ParseOptions = {
  parseErrors: true,
  trackedFiles: []
}
describe('python-xunit unittest report', () => {
  // Fixture and output locations shared by every test in this describe block
  const xmlFile = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
  const relativePath = normalizeFilePath(path.relative(__dirname, xmlFile))
  const xml = fs.readFileSync(xmlFile, {encoding: 'utf8'})
  const mdFile = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')

  it('report from python test results matches snapshot', async () => {
    // Track the source file so failures can be mapped back to it
    const options: ParseOptions = {
      ...defaultOpts,
      trackedFiles: ['tests/test_lib.py']
    }
    const run = await new PythonXunitParser(options).parse(relativePath, xml)
    expect(run).toMatchSnapshot()

    // Persist the rendered markdown for manual inspection
    const markdown = getReport([run])
    fs.mkdirSync(path.dirname(mdFile), {recursive: true})
    fs.writeFileSync(mdFile, markdown)
  })

  it('report does not include a title by default', async () => {
    const run = await new PythonXunitParser(defaultOpts).parse(relativePath, xml)
    const markdown = getReport([run])
    // With no title configured, the status badge is the very first line
    expect(markdown).toMatch(/^!\[Tests failed]/)
  })

  it.each([
    ['empty string', ''],
    ['space', ' '],
    ['tab', '\t'],
    ['newline', '\n']
  ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
    const run = await new PythonXunitParser(defaultOpts).parse(relativePath, xml)
    const markdown = getReport([run], {
      ...DEFAULT_OPTIONS,
      reportTitle
    })
    // Whitespace-only titles are ignored; the badge stays first
    expect(markdown).toMatch(/^!\[Tests failed]/)
  })

  it('report includes a custom report title', async () => {
    const run = await new PythonXunitParser(defaultOpts).parse(relativePath, xml)
    const markdown = getReport([run], {
      ...DEFAULT_OPTIONS,
      reportTitle: 'My Custom Title'
    })
    // The configured title is rendered as the first markdown heading
    expect(markdown).toMatch(/^# My Custom Title\n/)
  })
})
describe('python-xunit pytest report', () => {
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
it('report from python test results matches snapshot', async () => {
const trackedFiles = ['tests/test_lib.py']
const opts: ParseOptions = {
...defaultOpts,
trackedFiles
}
const parser = new PythonXunitParser(opts)
const result = await parser.parse(filePath, fileContent)
expect(result).toMatchSnapshot()
const report = getReport([result])
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
fs.writeFileSync(outputPath, report)
})
})

View File

@@ -1,6 +1,5 @@
name: Test Reporter name: Test Reporter
description: | description: Displays test results from popular testing frameworks directly in GitHub
Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
author: Michal Dorner <dorner.michal@gmail.com> author: Michal Dorner <dorner.michal@gmail.com>
inputs: inputs:
artifact: artifact:
@@ -29,9 +28,12 @@ inputs:
- dotnet-nunit - dotnet-nunit
- dotnet-trx - dotnet-trx
- flutter-json - flutter-json
- golang-json
- java-junit - java-junit
- jest-junit - jest-junit
- mocha-json - mocha-json
- phpunit-junit
- python-xunit
- rspec-json - rspec-json
- swift-xunit - swift-xunit
required: true required: true

1106
dist/index.js generated vendored

File diff suppressed because it is too large Load Diff

80
dist/licenses.txt generated vendored
View File

@@ -1350,48 +1350,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
sax sax
ISC BlueOak-1.0.0
The ISC License # Blue Oak Model License
Copyright (c) Isaac Z. Schlueter and Contributors Version 1.0.0
Permission to use, copy, modify, and/or distribute this software for any ## Purpose
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES This license gives everyone as much permission to work with
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF this software as possible, while protecting contributors
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR from liability.
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
==== ## Acceptance
`String.fromCodePoint` by Mathias Bynens used according to terms of MIT In order to receive this license, you must agree to its
License, as follows: rules. The rules of this license are both obligations
under that agreement and conditions to your license.
You must not do anything with this software that triggers
a rule that you cannot or will not follow.
Copyright Mathias Bynens <https://mathiasbynens.be/> ## Copyright
Permission is hereby granted, free of charge, to any person obtaining Each contributor licenses you to do everything with this
a copy of this software and associated documentation files (the software that would otherwise infringe that contributor's
"Software"), to deal in the Software without restriction, including copyright in it.
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be ## Notices
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, You must ensure that everyone who gets a copy of
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF any part of this software from you, with or without
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND changes, also gets the text of this license or a link to
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE <https://blueoakcouncil.org/license/1.0.0>.
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION ## Excuse
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
If anyone notifies you in writing that you have not
complied with [Notices](#notices), you can keep your
license by taking all practical steps to comply within 30
days after the notice. If you do not do so, your license
ends immediately.
## Patent
Each contributor licenses you to do everything with this
software that would otherwise infringe any patent claims
they can license or become able to license.
## Reliability
No contributor can revoke this license.
## No Liability
***As far as the law allows, this software comes as is,
without any warranty or condition, and no contributor
will be liable to anyone for any damages related to this
software or this license, under any kind of legal claim.***
to-regex-range to-regex-range

11
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "test-reporter", "name": "test-reporter",
"version": "2.2.0", "version": "2.3.0",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "test-reporter", "name": "test-reporter",
"version": "2.2.0", "version": "2.3.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.11.1", "@actions/core": "^1.11.1",
@@ -7580,9 +7580,10 @@
} }
}, },
"node_modules/sax": { "node_modules/sax": {
"version": "1.2.4", "version": "1.4.3",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.3.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" "integrity": "sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==",
"license": "BlueOak-1.0.0"
}, },
"node_modules/semver": { "node_modules/semver": {
"version": "7.7.3", "version": "7.7.3",

View File

@@ -1,6 +1,6 @@
{ {
"name": "test-reporter", "name": "test-reporter",
"version": "2.2.0", "version": "2.3.0",
"private": true, "private": true,
"description": "Presents test results from popular testing frameworks as Github check run", "description": "Presents test results from popular testing frameworks as Github check run",
"main": "lib/main.js", "main": "lib/main.js",
@@ -69,6 +69,9 @@
"ts-jest": "^29.4.5", "ts-jest": "^29.4.5",
"typescript": "^5.9.3" "typescript": "^5.9.3"
}, },
"overrides": {
"sax": "^1.4.3"
},
"jest-junit": { "jest-junit": {
"suiteName": "jest tests", "suiteName": "jest tests",
"outputDirectory": "__tests__/__results__", "outputDirectory": "__tests__/__results__",

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -9,6 +9,6 @@
"author": "Michal Dorner <dorner.michal@gmail.com>", "author": "Michal Dorner <dorner.michal@gmail.com>",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"mocha": "^8.3.0" "mocha": "^11.7.5"
} }
} }

View File

@@ -17,9 +17,10 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
import {JavaJunitParser} from './parsers/java-junit/java-junit-parser' import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser' import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser' import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
import {PhpunitJunitParser} from './parsers/phpunit-junit/phpunit-junit-parser'
import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser' import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser' import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
import {normalizeDirPath, normalizeFilePath} from './utils/path-utils' import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
import {getCheckRunContext} from './utils/github-utils' import {getCheckRunContext} from './utils/github-utils'
@@ -181,7 +182,9 @@ class TestReporter {
let baseUrl = '' let baseUrl = ''
if (this.useActionsSummary) { if (this.useActionsSummary) {
const summary = getReport(results, { const summary = getReport(
results,
{
listSuites, listSuites,
listTests, listTests,
baseUrl, baseUrl,
@@ -190,11 +193,12 @@ class TestReporter {
badgeTitle, badgeTitle,
reportTitle, reportTitle,
collapsed collapsed
}) },
shortSummary
)
core.info('Summary content:') core.info('Summary content:')
core.info(summary) core.info(summary)
core.summary.addRaw(`# ${shortSummary}`)
await core.summary.addRaw(summary).write() await core.summary.addRaw(summary).write()
} else { } else {
core.info(`Creating check run ${name}`) core.info(`Creating check run ${name}`)
@@ -268,6 +272,10 @@ class TestReporter {
return new JestJunitParser(options) return new JestJunitParser(options)
case 'mocha-json': case 'mocha-json':
return new MochaJsonParser(options) return new MochaJsonParser(options)
case 'phpunit-junit':
return new PhpunitJunitParser(options)
case 'python-xunit':
return new PythonXunitParser(options)
case 'rspec-json': case 'rspec-json':
return new RspecJsonParser(options) return new RspecJsonParser(options)
case 'swift-xunit': case 'swift-xunit':

View File

@@ -242,13 +242,13 @@ export class DartJsonParser implements TestParser {
private getRelativePath(path: string): string { private getRelativePath(path: string): string {
const prefix = 'file://' const prefix = 'file://'
if (path.startsWith(prefix)) { if (path.startsWith(prefix)) {
path = path.substr(prefix.length) path = path.substring(prefix.length)
} }
path = normalizeFilePath(path) path = normalizeFilePath(path)
const workDir = this.getWorkDir(path) const workDir = this.getWorkDir(path)
if (workDir !== undefined && path.startsWith(workDir)) { if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substr(workDir.length) path = path.substring(workDir.length)
} }
return path return path
} }

View File

@@ -136,7 +136,7 @@ export class DotnetNunitParser implements TestParser {
path = normalizeFilePath(path) path = normalizeFilePath(path)
const workDir = this.getWorkDir(path) const workDir = this.getWorkDir(path)
if (workDir !== undefined && path.startsWith(workDir)) { if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substr(workDir.length) path = path.substring(workDir.length)
} }
return path return path
} }

View File

@@ -94,7 +94,7 @@ export class DotnetTrxParser implements TestParser {
const resultTestName = r.result.$.testName const resultTestName = r.result.$.testName
const testName = const testName =
resultTestName.startsWith(className) && resultTestName[className.length] === '.' resultTestName.startsWith(className) && resultTestName[className.length] === '.'
? resultTestName.substr(className.length + 1) ? resultTestName.substring(className.length + 1)
: resultTestName : resultTestName
const test = new Test(testName, r.result.$.outcome, duration, error) const test = new Test(testName, r.result.$.outcome, duration, error)
@@ -177,7 +177,7 @@ export class DotnetTrxParser implements TestParser {
const filePath = normalizeFilePath(fileStr) const filePath = normalizeFilePath(fileStr)
const workDir = this.getWorkDir(filePath) const workDir = this.getWorkDir(filePath)
if (workDir) { if (workDir) {
const file = filePath.substr(workDir.length) const file = filePath.substring(workDir.length)
if (trackedFiles.includes(file)) { if (trackedFiles.includes(file)) {
const line = parseInt(lineStr) const line = parseInt(lineStr)
return {path: file, line} return {path: file, line}

View File

@@ -106,7 +106,7 @@ export class JestJunitParser implements TestParser {
path = normalizeFilePath(path) path = normalizeFilePath(path)
const workDir = this.getWorkDir(path) const workDir = this.getWorkDir(path)
if (workDir !== undefined && path.startsWith(workDir)) { if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substr(workDir.length) path = path.substring(workDir.length)
} }
return path return path
} }

View File

@@ -61,7 +61,7 @@ export class MochaJsonParser implements TestParser {
private processTest(suite: TestSuiteResult, test: MochaJsonTest, result: TestExecutionResult): void { private processTest(suite: TestSuiteResult, test: MochaJsonTest, result: TestExecutionResult): void {
const groupName = const groupName =
test.fullTitle !== test.title test.fullTitle !== test.title
? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd() ? test.fullTitle.substring(0, test.fullTitle.length - test.title.length).trimEnd()
: null : null
let group = suite.groups.find(grp => grp.name === groupName) let group = suite.groups.find(grp => grp.name === groupName)
@@ -103,7 +103,7 @@ export class MochaJsonParser implements TestParser {
path = normalizeFilePath(path) path = normalizeFilePath(path)
const workDir = this.getWorkDir(path) const workDir = this.getWorkDir(path)
if (workDir !== undefined && path.startsWith(workDir)) { if (workDir !== undefined && path.startsWith(workDir)) {
path = path.substr(workDir.length) path = path.substring(workDir.length)
} }
return path return path
} }

View File

@@ -0,0 +1,258 @@
import {ParseOptions, TestParser} from '../../test-parser'
import {parseStringPromise} from 'xml2js'
import {PhpunitReport, SingleSuiteReport, TestCase, TestSuite} from './phpunit-junit-types'
import {getBasePath, normalizeFilePath} from '../../utils/path-utils'
import {
TestExecutionResult,
TestRunResult,
TestSuiteResult,
TestGroupResult,
TestCaseResult,
TestCaseError
} from '../../test-results'
export class PhpunitJunitParser implements TestParser {
  // Normalized tracked file paths: the Set gives O(1) membership checks,
  // the list is kept for base-path detection in getWorkDir()
  readonly trackedFiles: Set<string>
  readonly trackedFilesList: string[]
  // Cached working-directory prefix detected from tracked files (see getWorkDir)
  private assumedWorkDir: string | undefined

  constructor(readonly options: ParseOptions) {
    this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
    this.trackedFiles = new Set(this.trackedFilesList)
  }

  /**
   * Parses PHPUnit test results in JUnit XML format.
   *
   * @param filePath - Path of the report file (used in error messages and as the run name)
   * @param content - Raw XML content of the report
   * @returns Parsed test run result
   * @throws Error when the content is not valid XML
   */
  async parse(filePath: string, content: string): Promise<TestRunResult> {
    const reportOrSuite = await this.getPhpunitReport(filePath, content)
    const isReport = (reportOrSuite as PhpunitReport).testsuites !== undefined

    // XML might contain:
    // - multiple suites under <testsuites> root node
    // - single <testsuite> as root node
    let report: PhpunitReport
    if (isReport) {
      report = reportOrSuite as PhpunitReport
    } else {
      // Make it behave the same way as if suite was inside <testsuites> root node
      const suite = (reportOrSuite as SingleSuiteReport).testsuite
      report = {
        testsuites: {
          $: {time: suite.$.time},
          testsuite: [suite]
        }
      }
    }

    return this.getTestRunResult(filePath, report)
  }

  // Parses the raw XML, wrapping parser failures in an error that names the report file
  private async getPhpunitReport(filePath: string, content: string): Promise<PhpunitReport | SingleSuiteReport> {
    try {
      return await parseStringPromise(content)
    } catch (e) {
      throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
    }
  }

  // Builds the run result from all leaf suites; the total time comes from the
  // <testsuites> `time` attribute when present (seconds converted to milliseconds)
  private getTestRunResult(filePath: string, report: PhpunitReport): TestRunResult {
    const suites: TestSuiteResult[] = []
    this.collectSuites(suites, report.testsuites.testsuite ?? [])

    const seconds = parseFloat(report.testsuites.$?.time ?? '')
    const time = isNaN(seconds) ? undefined : seconds * 1000
    return new TestRunResult(filePath, suites, time)
  }

  // Flattens the (possibly nested) suite tree into `results`, depth-first
  private collectSuites(results: TestSuiteResult[], testsuites: TestSuite[]): void {
    for (const ts of testsuites) {
      // Recursively process nested test suites first (depth-first)
      if (ts.testsuite) {
        this.collectSuites(results, ts.testsuite)
      }

      // Only add suites that have direct test cases
      // This avoids adding container suites that only hold nested suites
      if (ts.testcase && ts.testcase.length > 0) {
        const name = ts.$.name.trim()
        const time = parseFloat(ts.$.time) * 1000
        results.push(new TestSuiteResult(name, this.getGroups(ts), time))
      }
    }
  }

  // Groups a suite's test cases by classname and converts them to TestGroupResult
  private getGroups(suite: TestSuite): TestGroupResult[] {
    if (!suite.testcase || suite.testcase.length === 0) {
      return []
    }

    const groups: {name: string; tests: TestCase[]}[] = []
    for (const tc of suite.testcase) {
      // Use classname (PHPUnit style) for grouping
      // If classname matches suite name, use empty string to avoid redundancy
      const className = tc.$.classname ?? tc.$.class ?? ''
      const groupName = className === suite.$.name ? '' : className
      let grp = groups.find(g => g.name === groupName)
      if (grp === undefined) {
        grp = {name: groupName, tests: []}
        groups.push(grp)
      }
      grp.tests.push(tc)
    }

    return groups.map(grp => {
      const tests = grp.tests.map(tc => {
        const name = tc.$.name.trim()
        const result = this.getTestCaseResult(tc)
        const time = parseFloat(tc.$.time) * 1000
        const error = this.getTestCaseError(tc)
        return new TestCaseResult(name, result, time, error)
      })
      return new TestGroupResult(grp.name, tests)
    })
  }

  // Maps testcase child elements to an execution result:
  // <failure>/<error> -> failed, <skipped> -> skipped, otherwise success
  private getTestCaseResult(test: TestCase): TestExecutionResult {
    if (test.failure || test.error) return 'failed'
    if (test.skipped) return 'skipped'
    return 'success'
  }

  /**
   * Extracts error information (message, details, source location) from a failed test case.
   * Returns undefined when error parsing is disabled or the case has no failure/error element.
   */
  private getTestCaseError(tc: TestCase): TestCaseError | undefined {
    if (!this.options.parseErrors) {
      return undefined
    }

    // We process <error> and <failure> the same way
    const failures = tc.failure ?? tc.error
    if (!failures || failures.length === 0) {
      return undefined
    }

    const failure = failures[0]
    const details = typeof failure === 'string' ? failure : failure._ ?? ''

    // PHPUnit provides file path directly in testcase attributes
    let filePath: string | undefined
    let line: number | undefined
    if (tc.$.file) {
      const relativePath = this.getRelativePath(tc.$.file)
      if (this.trackedFiles.has(relativePath)) {
        filePath = relativePath
        // Only attach a line number together with an accepted path —
        // a line without a path cannot be annotated in the report
        if (tc.$.line) {
          line = parseInt(tc.$.line, 10)
        }
      }
    }

    // If file not in tracked files, try to extract from error details
    if (!filePath && details) {
      const extracted = this.extractFileAndLine(details)
      if (extracted) {
        filePath = extracted.filePath
        line = extracted.line
      }
    }

    // Combine <failure type="..."> and message="..." into a single message string
    let message: string | undefined
    if (typeof failure !== 'string' && failure.$) {
      message = failure.$.message
      if (failure.$.type) {
        message = message ? `${failure.$.type}: ${message}` : failure.$.type
      }
    }

    return {
      path: filePath,
      line,
      details,
      message
    }
  }

  /**
   * Scans error details line by line for a source location of a tracked file.
   * Recognizes both `path/to/file.php:123` and `path/to/file.php(123)` formats
   * (including `.phpt` files and Windows drive-letter paths).
   */
  private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
    // PHPUnit stack traces typically have format: /path/to/file.php:123
    const lines = details.split(/\r?\n/)
    for (const str of lines) {
      // Match patterns like /path/to/file.php:123 or at /path/to/file.php(123)
      const matchColon = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
      if (matchColon) {
        const relativePath = this.getRelativePath(matchColon[1])
        if (this.trackedFiles.has(relativePath)) {
          return {filePath: relativePath, line: parseInt(matchColon[2], 10)}
        }
      }
      const matchParen = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt))\((\d+)\)/)
      if (matchParen) {
        const relativePath = this.getRelativePath(matchParen[1])
        if (this.trackedFiles.has(relativePath)) {
          return {filePath: relativePath, line: parseInt(matchParen[2], 10)}
        }
      }
    }
    return undefined
  }

  /**
   * Converts an absolute file path to a relative path by stripping the working directory prefix.
   *
   * @param path - The absolute file path from PHPUnit output (e.g., `/home/runner/work/repo/src/Test.php`)
   * @returns The relative path (e.g., `src/Test.php`) if a working directory can be determined,
   *          otherwise returns the normalized original path
   */
  private getRelativePath(path: string): string {
    path = normalizeFilePath(path)
    const workDir = this.getWorkDir(path)
    if (workDir !== undefined && path.startsWith(workDir)) {
      path = path.substring(workDir.length)
    }
    return path
  }

  /**
   * Determines the working directory prefix to strip from absolute file paths.
   *
   * The working directory is resolved using the following priority:
   *
   * 1. **Explicit configuration** - If `options.workDir` is set, it takes precedence.
   *    This allows users to explicitly specify the working directory.
   *
   * 2. **Cached assumption** - If we've previously determined a working directory
   *    (`assumedWorkDir`) and the current path starts with it, we reuse that value.
   *    This avoids redundant computation for subsequent paths.
   *
   * 3. **Heuristic detection** - Uses `getBasePath()` to find the common prefix between
   *    the absolute path and the list of tracked files in the repository. For example:
   *    - Absolute path: `/home/runner/work/repo/src/Test.php`
   *    - Tracked file: `src/Test.php`
   *    - Detected workDir: `/home/runner/work/repo/`
   *
   *    Once detected, the working directory is cached in `assumedWorkDir` for efficiency.
   *
   * @param path - The normalized absolute file path to analyze
   * @returns The working directory prefix (with trailing slash), or `undefined` if it cannot be determined
   *
   * @example
   * // With tracked file 'src/Foo.php' and path '/home/runner/work/repo/src/Foo.php'
   * // Returns: '/home/runner/work/repo/'
   */
  private getWorkDir(path: string): string | undefined {
    if (this.options.workDir) {
      return this.options.workDir
    }
    if (this.assumedWorkDir && path.startsWith(this.assumedWorkDir)) {
      return this.assumedWorkDir
    }
    const basePath = getBasePath(path, this.trackedFilesList)
    if (basePath !== undefined) {
      this.assumedWorkDir = basePath
    }
    return basePath
  }
}

View File

@@ -0,0 +1,52 @@
/** Root shape of a parsed report whose XML root element is <testsuites>. */
export interface PhpunitReport {
  testsuites: TestSuites
}

/** Root shape of a parsed report whose XML root element is a single <testsuite>. */
export interface SingleSuiteReport {
  testsuite: TestSuite
}

/** The <testsuites> container element. */
export interface TestSuites {
  $?: {
    // Total run time in seconds, as a string (attribute may be absent)
    time?: string
  }
  testsuite?: TestSuite[]
}

/**
 * A <testsuite> element. PHPUnit nests suites, so a suite may hold further
 * <testsuite> children and/or direct <testcase> children.
 */
export interface TestSuite {
  $: {
    name: string
    tests?: string
    assertions?: string
    errors?: string
    failures?: string
    skipped?: string
    // Suite run time in seconds, as a string
    time: string
    file?: string
  }
  testcase?: TestCase[]
  testsuite?: TestSuite[]
}

/** A single <testcase> element. */
export interface TestCase {
  $: {
    name: string
    // PHPUnit may emit either `class` or `classname` for the owning test class
    class?: string
    classname?: string
    // Source file and line of the test, when provided in the report
    file?: string
    line?: string
    assertions?: string
    // Test run time in seconds, as a string
    time: string
  }
  failure?: Failure[]
  error?: Failure[]
  skipped?: string[]
}

/** A <failure> or <error> child element of a test case. */
export interface Failure {
  // Element text content (xml2js places character data under `_`)
  _: string
  $?: {
    type?: string
    message?: string
  }
}

View File

@@ -0,0 +1,8 @@
import {ParseOptions} from '../../test-parser'
import {JavaJunitParser} from '../java-junit/java-junit-parser'
/**
 * Parser for Python test results in xunit/JUnit XML format (fixtures in this
 * repo cover unittest and pytest output).
 *
 * The format is handled identically to the Java JUnit dialect, so parsing is
 * delegated entirely to JavaJunitParser; this class exists as a distinct type
 * so the 'python-xunit' reporter option maps to its own parser.
 */
export class PythonXunitParser extends JavaJunitParser {
  // NOTE(review): `readonly options` redeclares a parameter property that the
  // base class presumably also declares — confirm against JavaJunitParser.
  constructor(readonly options: ParseOptions) {
    super(options)
  }
}

View File

@@ -55,7 +55,7 @@ export class RspecJsonParser implements TestParser {
private processTest(suite: TestSuiteResult, test: RspecExample, result: TestExecutionResult): void { private processTest(suite: TestSuiteResult, test: RspecExample, result: TestExecutionResult): void {
const groupName = const groupName =
test.full_description !== test.description test.full_description !== test.description
? test.full_description.substr(0, test.full_description.length - test.description.length).trimEnd() ? test.full_description.substring(0, test.full_description.length - test.description.length).trimEnd()
: null : null
let group = suite.groups.find(grp => grp.name === groupName) let group = suite.groups.find(grp => grp.name === groupName)

View File

@@ -30,13 +30,15 @@ export const DEFAULT_OPTIONS: ReportOptions = {
collapsed: 'auto' collapsed: 'auto'
} }
export function getReport(results: TestRunResult[], options: ReportOptions = DEFAULT_OPTIONS): string { export function getReport(
core.info('Generating check run summary') results: TestRunResult[],
options: ReportOptions = DEFAULT_OPTIONS,
shortSummary = ''
): string {
applySort(results) applySort(results)
const opts = {...options} const opts = {...options}
let lines = renderReport(results, opts) let lines = renderReport(results, opts, shortSummary)
let report = lines.join('\n') let report = lines.join('\n')
if (getByteLength(report) <= getMaxReportLength(options)) { if (getByteLength(report) <= getMaxReportLength(options)) {
@@ -46,7 +48,7 @@ export function getReport(results: TestRunResult[], options: ReportOptions = DEF
   if (opts.listTests === 'all') {
     core.info("Test report summary is too big - setting 'listTests' to 'failed'")
     opts.listTests = 'failed'
-    lines = renderReport(results, opts)
+    lines = renderReport(results, opts, shortSummary)
     report = lines.join('\n')
     if (getByteLength(report) <= getMaxReportLength(options)) {
       return report
@@ -103,7 +105,7 @@ function getByteLength(text: string): number {
   return Buffer.byteLength(text, 'utf8')
 }

-function renderReport(results: TestRunResult[], options: ReportOptions): string[] {
+function renderReport(results: TestRunResult[], options: ReportOptions, shortSummary: string): string[] {
   const sections: string[] = []

   const reportTitle: string = options.reportTitle.trim()
@@ -111,6 +113,10 @@ function renderReport(results: TestRunResult[], options: ReportOptions): string[
     sections.push(`# ${reportTitle}`)
   }

+  if (shortSummary) {
+    sections.push(`## ${shortSummary}`)
+  }
+
   const badge = getReportBadge(results, options)
   sections.push(badge)

View File

@@ -36,7 +36,7 @@ export function ellipsis(text: string, maxLength: number): string {
     return text
   }

-  return text.substr(0, maxLength - 3) + '...'
+  return text.substring(0, maxLength - 3) + '...'
 }

 export function formatTime(ms: number): string {
export function formatTime(ms: number): string { export function formatTime(ms: number): string {

View File

@@ -34,6 +34,6 @@ export function getBasePath(path: string, trackedFiles: string[]): string | unde
     return undefined
   }

-  const base = path.substr(0, path.length - max.length)
+  const base = path.substring(0, path.length - max.length)
   return base
 }