Mirror of https://github.com/dorny/test-reporter.git (synced 2026-02-01 10:55:23 -08:00)

Compare commits: feature/63...main (89 commits)
Commits (SHA1):

a810f9bf83, b082adf0ec, bcafc9fcbe, b0cbac615f, c92a2893a9, 6697ec40e6, 63870298f5, 68967725f7, e17be7e007, 6efb86e1f8,
055bc8c025, 17c900ba4e, ff2d13cc36, 20823bb69a, 0be3971fec, 4ee97617f7, a97700c53c, 837045e72b, d1de4d5f06, f24c625f56,
6a8a429644, ee446707ff, fe45e95373, e40a1da745, 3445860437, 9ef5c136b2, 83e20c1534, 4331a3b620, 04232af26f, cf146f4036,
33fc27cf09, 8fd5fc58ca, fc80cb4400, 79ea6a9d0e, aef3d726a6, c1a56edcfe, 3b9dad208e, 7c636a991c, cfce4bda71, fe87682515,
9b8d3b002e, e2f0ff6339, bc8c29617e, 9aef9d168f, 6b64465c34, 6617053f9c, 43a747d94c, 7b7927aa7d, eeac280b8e, 6939db53fb,
b3812e0f5b, cd299561e7, c7935221e6, 5fb0582760, 7148297f02, 828632acd0, 4a41472ca4, 22dc7b52f4, bed521d765, 6079ce3d17,
de77f76b7e, c883ae9738, 35be98f7e7, f372a8338e, 948dd03d7b, cf9db500ed, ba33405987, 34d8269ede, fd1c798d8d, 2211cf1035,
be3721d54a, d171d89cd4, 661decd3af, bd9e36bf0c, 9642942c97, aa953f36f9, f686ce916a, b14337a039, ec1e910416, 353a438514,
dc3a92680f, e8e27361af, ec9d9d2459, be36461fba, 07e5c648b5, 1c33c4c823, eea8b67eb1, 4128d36b92, d1504ea554
.github/workflows/check-dist.yml (vendored, 6 changed lines)

@@ -21,10 +21,10 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6

       - name: Set Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'

@@ -46,7 +46,7 @@ jobs:
         id: diff

       # If index.js was different than expected, upload the expected version as an artifact
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v6
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
.github/workflows/ci.yml (vendored, 6 changed lines)

@@ -13,8 +13,8 @@ jobs:
     name: Build & Test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v6
+      - uses: actions/setup-node@v6
         with:
           node-version-file: '.nvmrc'
       - run: npm ci

@@ -25,7 +25,7 @@ jobs:

       - name: Upload test results
         if: ${{ !cancelled() }}
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: test-results
           path: __tests__/__results__/*.xml
.github/workflows/manual-run.yml (vendored, 2 changed lines)

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
       - run: npm ci
       - run: npm run build
       - run: npm test
.github/workflows/test-report.yml (vendored, 2 changed lines)

@@ -11,7 +11,7 @@ jobs:
     name: Workflow test
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
      - uses: ./
        with:
          artifact: test-results
CHANGELOG.md (34 changed lines)

@@ -1,5 +1,39 @@
# Changelog

## 2.5.0
* Feature: Add Nette Tester support with `tester-junit` reporter https://github.com/dorny/test-reporter/pull/707
* Maintenance: Bump actions/upload-artifact from 5 to 6 https://github.com/dorny/test-reporter/pull/695

## 2.4.0
* Feature: Add PHPUnit support with JUnit XML dialect parser https://github.com/dorny/test-reporter/pull/422
* Feature: Add JUnit XML sample files and tests for validation https://github.com/dorny/test-reporter/pull/701
* Fix: Refactor deprecated `String.substr()` function to use `String.substring()` https://github.com/dorny/test-reporter/pull/704

## 2.3.0
* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682

## 2.2.0
* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
* Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672
* Fix missing `report-title` attribute in action definition https://github.com/dorny/test-reporter/pull/637
* Refactor variable names to fix shadowing issues https://github.com/dorny/test-reporter/pull/630

## 2.1.1
* Fix error when a TestMethod element does not have a className attribute in a trx file https://github.com/dorny/test-reporter/pull/623
* Add stack trace from trx to summary https://github.com/dorny/test-reporter/pull/615
* List only failed tests https://github.com/dorny/test-reporter/pull/606
* Add type definitions to `github-utils.ts` https://github.com/dorny/test-reporter/pull/604
* Avoid split on undefined https://github.com/dorny/test-reporter/pull/258
* Return links to summary report https://github.com/dorny/test-reporter/pull/588
* Add step summary short summary https://github.com/dorny/test-reporter/pull/589
* Fix for empty TRX TestDefinitions https://github.com/dorny/test-reporter/pull/582
* Increase step summary limit to 1MiB https://github.com/dorny/test-reporter/pull/581
* Fix input description for list options https://github.com/dorny/test-reporter/pull/572

## 2.1.0
* Feature: Add summary title https://github.com/dorny/test-reporter/pull/568
* Feature: Add Golang test parser https://github.com/dorny/test-reporter/pull/571
README.md (76 changed lines)

@@ -19,6 +19,9 @@ This [Github Action](https://github.com/features/actions) displays test results
- Go / [go test](https://pkg.go.dev/testing)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- PHP / [PHPUnit](https://phpunit.de/) / [Nette Tester](https://tester.nette.org/)
- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit

For more information see [Supported formats](#supported-formats) section.
@@ -145,7 +148,10 @@ jobs:
          # java-junit
          # jest-junit
          # mocha-json
          # phpunit-junit
          # python-xunit
          # rspec-json
          # swift-xunit
          reporter: ''

          # Allows you to generate only the summary.
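For context, the new `phpunit-junit`, `python-xunit`, and `tester-junit` reporters slot into the action's usual configuration the same way as the existing ones. A minimal sketch follows; the `dorny/test-reporter@v2` tag, report path, and job layout are illustrative assumptions, not lines from this diff:

```yaml
# Hypothetical workflow: run PHPUnit, then publish the results with test-reporter.
name: CI
on: push
permissions:
  contents: read
  checks: write            # the action needs this to create a check run
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: ./vendor/bin/phpunit --log-junit reports/phpunit-junit.xml   # produce JUnit XML
      - uses: dorny/test-reporter@v2            # version tag is an assumption
        if: ${{ !cancelled() }}                 # report even when tests fail
        with:
          name: PHPUnit Tests                   # check run name
          path: reports/phpunit-junit.xml       # file produced above
          reporter: phpunit-junit               # one of the reporters listed above
```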
@@ -253,6 +259,20 @@ Supported testing frameworks:
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>

<details>
<summary>dotnet-nunit</summary>

Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:

`dotnet test --logger "nunit;LogFileName=test-results.xml"`

Supported testing frameworks:
- [NUnit](https://nunit.org/)

For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
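The same pattern applies to the `dotnet-nunit` reporter described above; a short sketch of the two relevant steps is shown below (the report glob and the version tag are assumptions):

```yaml
# Hypothetical steps: produce NUnit3 XML with dotnet test, then publish it.
      - run: dotnet test --logger "nunit;LogFileName=test-results.xml"
      - uses: dorny/test-reporter@v2        # version tag is an assumption
        if: ${{ !cancelled() }}
        with:
          name: NUnit Tests
          path: '**/test-results.xml'       # assumed location of the generated report
          reporter: dotnet-nunit
```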
<details>
<summary>flutter-json</summary>
@@ -296,6 +316,27 @@ This is due to the fact Java stack traces don't contain a full path to the source file.
Some heuristic was necessary to figure out the mapping between the line in the stack trace and an actual source file.
</details>

<details>
<summary>phpunit-junit</summary>

[PHPUnit](https://phpunit.de/) can generate JUnit XML via CLI:
`phpunit --log-junit reports/phpunit-junit.xml`

</details>
<details>
<summary>tester-junit</summary>

[Nette Tester](https://tester.nette.org/) can generate JUnit XML via CLI:

```bash
tester -s -o junit tests/ > reports/tester-junit.xml
```

**Note:** Nette Tester's JUnit output doesn't include test suite names. The parser will use the report file name as the suite name and automatically group tests by directory structure.

</details>
<details>
<summary>jest-junit</summary>
@@ -349,6 +390,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), Mocha
Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
</details>
<details>
<summary>python-xunit (Experimental)</summary>

Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.

For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.

```shell
pytest --junit-xml=test-report.xml
```

For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
</details>
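To make the **unittest** route above concrete, here is a minimal runner sketch; the `tests` directory, the `test-reports` output folder, and the script name are assumptions, and `XMLTestRunner` comes from the unittest-xml-reporting package mentioned above:

```python
# run_tests.py - hypothetical helper producing JUnit-style XML from unittest suites.
import unittest

import xmlrunner  # provided by the unittest-xml-reporting package

if __name__ == "__main__":
    # Discover tests under ./tests and write XML reports into ./test-reports;
    # the resulting files can then be passed to the action with the python-xunit reporter.
    suite = unittest.defaultTestLoader.discover("tests")
    xmlrunner.XMLTestRunner(output="test-reports", verbosity=2).run(suite)
```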
<details>
<summary>rspec-json</summary>

[RSpec](https://rspec.info/) testing framework support requires the use of the JSON formatter.
You can configure RSpec to output JSON by using the `--format json` option together with `--out` to write the results to a file:

```shell
rspec --format json --out rspec-results.json
```

Or configure it in `.rspec` file:
```
--format json
--out rspec-results.json
```

For more information see:
- [RSpec documentation](https://rspec.info/)
- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
</details>
<details>
<summary>swift-xunit (Experimental)</summary>
__tests__/__outputs__/dotnet-xunitv3.md (new file, 26 lines)

@@ -0,0 +1,26 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-xunitv3.trx](#user-content-r0)|1 ✅|3 ❌||267ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-xunitv3.trx</a>
**4** tests were completed in **267ms** with **1** passed, **3** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitV3Tests.FixtureTests](#user-content-r0s0)|1 ✅|1 ❌||18ms|
|[Unclassified](#user-content-r0s1)||2 ❌||0ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitV3Tests.FixtureTests</a>
```
❌ Failing_Test
Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)
✅ Passing_Test
```
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">Unclassified</a>
```
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]
```
__tests__/__outputs__/junit-basic.md (new file, 23 lines)

@@ -0,0 +1,23 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```
__tests__/__outputs__/junit-complete.md (new file, 22 lines)

@@ -0,0 +1,22 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
⚪ testCase4
❌ testCase5
AssertionError: Expected value did not match.
❌ testCase6
ArithmeticError: Division by zero.
✅ testCase7
✅ testCase8
```
__tests__/__outputs__/phpunit-junit-basic-results.md (new file, 30 lines)

@@ -0,0 +1,30 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/junit-basic.xml](#user-content-r0)|8 ✅|1 ❌||16s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/junit-basic.xml</a>
**9** tests were completed in **16s** with **8** passed, **1** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|[Tests.Authentication.Login](#user-content-r0s1)|3 ✅|||4s|
|[Tests.Registration](#user-content-r0s2)|3 ✅|||7s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
```
✅ testCase7
✅ testCase8
❌ testCase9
AssertionError: Assertion error message
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Authentication.Login</a>
```
✅ testCase4
✅ testCase5
✅ testCase6
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">Tests.Registration</a>
```
✅ testCase1
✅ testCase2
✅ testCase3
```
__tests__/__outputs__/phpunit-phpcheckstyle-results.md (new file, 88 lines)

@@ -0,0 +1,88 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/phpunit/phpcheckstyle-phpunit.xml](#user-content-r0)|28 ✅|2 ❌||41ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/phpcheckstyle-phpunit.xml</a>
**30** tests were completed in **41ms** with **28** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CommentsTest](#user-content-r0s0)|3 ✅|||7ms|
|[DeprecationTest](#user-content-r0s1)|1 ✅|||1ms|
|[GoodTest](#user-content-r0s2)|4 ✅|||5ms|
|[IndentationTest](#user-content-r0s3)|8 ✅|||8ms|
|[MetricsTest](#user-content-r0s4)|1 ✅|||4ms|
|[NamingTest](#user-content-r0s5)|2 ✅|||3ms|
|[OptimizationTest](#user-content-r0s6)|1 ✅|||1ms|
|[OtherTest](#user-content-r0s7)|2 ✅|2 ❌||7ms|
|[PHPTagsTest](#user-content-r0s8)|2 ✅|||1ms|
|[ProhibitedTest](#user-content-r0s9)|1 ✅|||1ms|
|[StrictCompareTest](#user-content-r0s10)|1 ✅|||2ms|
|[UnusedTest](#user-content-r0s11)|2 ✅|||2ms|
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">CommentsTest</a>
```
✅ testGoodDoc
✅ testComments
✅ testTODOs
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">DeprecationTest</a>
```
✅ testDeprecations
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">GoodTest</a>
```
✅ testGood
✅ testDoWhile
✅ testAnonymousFunction
✅ testException
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">IndentationTest</a>
```
✅ testTabIndentation
✅ testSpaceIndentation
✅ testSpaceIndentationArray
✅ testGoodSpaceIndentationArray
✅ testGoodIndentationNewLine
✅ testGoodIndentationSpaces
✅ testBadSpaces
✅ testBadSpaceAfterControl
```
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">MetricsTest</a>
```
✅ testMetrics
```
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">NamingTest</a>
```
✅ testNaming
✅ testFunctionNaming
```
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">OptimizationTest</a>
```
✅ testTextAfterClosingTag
```
### ❌ <a id="user-content-r0s7" href="#user-content-r0s7">OtherTest</a>
```
❌ testOther
PHPUnit\Framework\ExpectationFailedException
❌ testException
PHPUnit\Framework\ExpectationFailedException
✅ testEmpty
✅ testSwitchCaseNeedBreak
```
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">PHPTagsTest</a>
```
✅ testTextAfterClosingTag
✅ testClosingTagNotNeeded
```
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">ProhibitedTest</a>
```
✅ testProhibited
```
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">StrictCompareTest</a>
```
✅ testStrictCompare
```
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">UnusedTest</a>
```
✅ testGoodUnused
✅ testBadUnused
```
__tests__/__outputs__/phpunit-test-results.md (new file, 41 lines)

@@ -0,0 +1,41 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/phpunit/phpunit.xml](#user-content-r0)|10 ✅|2 ❌||148ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/phpunit/phpunit.xml</a>
**12** tests were completed in **148ms** with **10** passed, **2** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[CLI Arguments](#user-content-r0s0)||2 ❌||140ms|
|[PHPUnit\Event\CollectingDispatcherTest](#user-content-r0s1)|2 ✅|||4ms|
|[PHPUnit\Event\DeferringDispatcherTest](#user-content-r0s2)|4 ✅|||3ms|
|[PHPUnit\Event\DirectDispatcherTest](#user-content-r0s3)|4 ✅|||1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">CLI Arguments</a>
```
❌ targeting-traits-with-coversclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
❌ targeting-traits-with-usesclass-attribute-is-deprecated.phpt
PHPUnit\Framework\PhptAssertionFailedError
```
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">PHPUnit\Event\CollectingDispatcherTest</a>
```
PHPUnit.Event.CollectingDispatcherTest
✅ testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation
✅ testCollectsDispatchedEventsUntilFlushed
```
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">PHPUnit\Event\DeferringDispatcherTest</a>
```
PHPUnit.Event.DeferringDispatcherTest
✅ testCollectsEventsUntilFlush
✅ testFlushesCollectedEvents
✅ testSubscriberCanBeRegistered
✅ testTracerCanBeRegistered
```
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">PHPUnit\Event\DirectDispatcherTest</a>
```
PHPUnit.Event.DirectDispatcherTest
✅ testDispatchesEventToKnownSubscribers
✅ testDispatchesEventToTracers
✅ testRegisterRejectsUnknownSubscriber
✅ testDispatchRejectsUnknownEventType
```
__tests__/__outputs__/python-xunit-pytest.md (new file, 26 lines)

@@ -0,0 +1,26 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
```
tests.test_lib
✅ test_always_pass
✅ test_with_subtests
✅ test_parameterized[param1]
✅ test_parameterized[param2]
⚪ test_always_skip
❌ test_always_fail
assert False
⚪ test_expected_failure
❌ test_error
Exception: error
✅ test_with_record_property
custom_classname
✅ test_with_record_xml_attribute
```
__tests__/__outputs__/python-xunit-unittest.md (new file, 23 lines)

@@ -0,0 +1,23 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
```
TestAcme
✅ test_always_pass
✅ test_parameterized_0_param1
✅ test_parameterized_1_param2
✅ test_with_subtests
❌ test_always_fail
AssertionError: failed
❌ test_error
Exception: error
⚪ test_always_skip
⚪ test_expected_failure
```
__tests__/__outputs__/tester-bootstrap-test-results.md (new file, 20 lines)

@@ -0,0 +1,20 @@

<details><summary>Expand for details</summary>

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/nette-tester/BootstrapFormRenderer-report.xml](#user-content-r0)|4 ✅|||300ms|
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/BootstrapFormRenderer-report.xml</a>
**4** tests were completed in **300ms** with **4** passed, **0** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[BootstrapFormRenderer-report.xml](#user-content-r0s0)|4 ✅|||300ms|
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">BootstrapFormRenderer-report.xml</a>
```
KdybyTests/BootstrapFormRenderer
✅ BootstrapRendererTest.phpt::testRenderingBasics
✅ BootstrapRendererTest.phpt::testRenderingIndividual
✅ BootstrapRendererTest.phpt::testRenderingComponents
✅ BootstrapRendererTest.phpt::testMultipleFormsInTemplate
```
</details>
__tests__/__outputs__/tester-v1.7-test-results.md (new file, 87 lines)

@@ -0,0 +1,87 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/nette-tester/tester-v1.7-report.xml](#user-content-r0)|61 ✅|1 ❌|3 ⚪|2s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/tester-v1.7-report.xml</a>
**65** tests were completed in **2s** with **61** passed, **1** failed and **3** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[tester-v1.7-report.xml](#user-content-r0s0)|61 ✅|1 ❌|3 ⚪|2s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">tester-v1.7-report.xml</a>
```
tests/Framework
⚪ Dumper.toPhp.php7.phpt
✅ Assert.contains.phpt
✅ Assert.count.phpt
✅ Assert.equal.phpt
✅ Assert.equal.recursive.phpt::testSimple
✅ Assert.equal.recursive.phpt::testMultiple
✅ Assert.equal.recursive.phpt::testDeep
✅ Assert.equal.recursive.phpt::testCross
✅ Assert.equal.recursive.phpt::testThirdParty
✅ Assert.error.phpt
✅ Assert.exception.phpt
✅ Assert.false.phpt
✅ Assert.match.phpt
✅ Assert.match.regexp.phpt
✅ Assert.nan.phpt
✅ Assert.noError.phpt
✅ Assert.same.phpt
✅ Assert.null.phpt
✅ Assert.true.phpt
✅ Assert.truthy.phpt
✅ DataProvider.load.phpt
✅ Assert.type.phpt
✅ DataProvider.parseAnnotation.phpt
✅ DataProvider.testQuery.phpt
✅ DomQuery.css2Xpath.phpt
✅ DomQuery.fromHtml.phpt
✅ DomQuery.fromXml.phpt
✅ Dumper.dumpException.phpt
✅ Dumper.color.phpt
✅ Dumper.toLine.phpt
✅ Dumper.toPhp.recursion.phpt
✅ Dumper.toPhp.phpt
✅ FileMock.phpt
✅ Helpers.escapeArg.phpt
✅ Helpers.parseDocComment.phpt
✅ TestCase.annotationThrows.phpt
✅ TestCase.annotationThrows.setUp.tearDown.phpt
✅ TestCase.annotationThrows.syntax.phpt
✅ TestCase.basic.phpt
✅ TestCase.dataProvider.generator.phpt
✅ TestCase.dataProvider.phpt
✅ TestCase.invalidMethods.phpt
✅ TestCase.invalidProvider.phpt
✅ TestCase.order.error.phpt
✅ TestCase.order.errorMuted.phpt
✅ TestCase.order.phpt
✅ Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)
tests/CodeCoverage
⚪ Collector.start.phpt
✅ PhpParser.parse.lines.phpt
✅ PhpParser.parse.methods.phpt
✅ CloverXMLGenerator.phpt
✅ PhpParser.parse.edge.phpt
✅ PhpParser.parse.lines-of-code.phpt
✅ PhpParser.parse.namespaces.phpt
tests/Runner
✅ CommandLine.phpt
⚪ HhvmPhpInterpreter.phpt
✅ Runner.find-tests.phpt
✅ Job.phpt
✅ ZendPhpExecutable.phpt
✅ Runner.multiple.phpt
✅ Runner.edge.phpt
✅ Runner.stop-on-fail.phpt
❌ Runner.multiple-fails.phpt
Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'

diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'

in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()
✅ Runner.annotations.phpt
tests/RunnerOutput
✅ JUnitPrinter.phpt
```
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`dart-json tests matches report snapshot 1`] = `
 TestRunResult {

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`dotnet-nunit tests report from ./reports/dotnet test results matches snapshot 1`] = `
 TestRunResult {
@@ -1,6 +1,6 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`dotnet-trx tests matches report snapshot (only failed tests) 1`] = `
|
||||
exports[`dotnet-trx tests matches dotnet-trx report snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-trx.trx",
|
||||
"suites": [
|
||||
@@ -135,7 +135,77 @@ Actual: False
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`dotnet-trx tests matches report snapshot 1`] = `
|
||||
exports[`dotnet-trx tests matches dotnet-xunitv3 report snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-xunitv3.trx",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": null,
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }
|
||||
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
|
||||
"line": 25,
|
||||
"message": "Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }
|
||||
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
|
||||
"path": "DotnetTests.XUnitV3Tests/FixtureTests.cs",
|
||||
},
|
||||
"name": "Failing_Test",
|
||||
"result": "failed",
|
||||
"time": 17.0545,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Passing_Test",
|
||||
"result": "success",
|
||||
"time": 0.8786,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DotnetTests.XUnitV3Tests.FixtureTests",
|
||||
"totalTime": undefined,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": null,
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Unclassified",
|
||||
"totalTime": undefined,
|
||||
},
|
||||
],
|
||||
"totalTime": 267,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`dotnet-trx tests matches report snapshot (only failed tests) 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-trx.trx",
|
||||
"suites": [
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`golang-json tests report from ./reports/dotnet test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
@@ -6878,3 +6878,153 @@ at java.lang.Thread.run(Thread.java:748)
|
||||
"totalTime": 2126531.0000000005,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-complete.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2436,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1534,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 822,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Expected value did not match.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase5",
|
||||
"result": "failed",
|
||||
"time": 2902.412,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "ArithmeticError: Division by zero.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase6",
|
||||
"result": "failed",
|
||||
"time": 3819,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2944,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1625.275,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 16082.687,
|
||||
},
|
||||
],
|
||||
"totalTime": 16082.687,
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`jest-junit tests parsing ESLint report without timing information works - PR #134 1`] = `
 TestRunResult {

@@ -1,4 +1,4 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
+// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

 exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
 TestRunResult {
628
__tests__/__snapshots__/phpunit-junit.test.ts.snap
Normal file
628
__tests__/__snapshots__/phpunit-junit.test.ts.snap
Normal file
@@ -0,0 +1,628 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`phpunit-junit tests report from junit-basic.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/junit-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "success",
|
||||
"time": 2244,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase5",
|
||||
"result": "success",
|
||||
"time": 781,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase6",
|
||||
"result": "success",
|
||||
"time": 1331,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication.Login",
|
||||
"totalTime": 4356,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "",
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpcheckstyle-phpunit.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/phpcheckstyle-phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodDoc",
|
||||
"result": "success",
|
||||
"time": 5.093,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testComments",
|
||||
"result": "success",
|
||||
"time": 0.921,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTODOs",
|
||||
"result": "success",
|
||||
"time": 0.6880000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CommentsTest",
|
||||
"totalTime": 6.702,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDeprecations",
|
||||
"result": "success",
|
||||
"time": 0.9740000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DeprecationTest",
|
||||
"totalTime": 0.9740000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGood",
|
||||
"result": "success",
|
||||
"time": 2.6470000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDoWhile",
|
||||
"result": "success",
|
||||
"time": 1.0219999999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testAnonymousFunction",
|
||||
"result": "success",
|
||||
"time": 0.8,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testException",
|
||||
"result": "success",
|
||||
"time": 0.888,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "GoodTest",
|
||||
"totalTime": 5.357,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTabIndentation",
|
||||
"result": "success",
|
||||
"time": 0.857,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentation",
|
||||
"result": "success",
|
||||
"time": 0.929,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 0.975,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 1.212,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationNewLine",
|
||||
"result": "success",
|
||||
"time": 0.859,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationSpaces",
|
||||
"result": "success",
|
||||
"time": 0.78,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaces",
|
||||
"result": "success",
|
||||
"time": 1.1199999999999999,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaceAfterControl",
|
||||
"result": "success",
|
||||
"time": 0.9219999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "IndentationTest",
|
||||
"totalTime": 7.654,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testMetrics",
|
||||
"result": "success",
|
||||
"time": 4.146999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "MetricsTest",
|
||||
"totalTime": 4.146999999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testNaming",
|
||||
"result": "success",
|
||||
"time": 1.426,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFunctionNaming",
|
||||
"result": "success",
|
||||
"time": 1.271,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "NamingTest",
|
||||
"totalTime": 2.697,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.9940000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OptimizationTest",
|
||||
"totalTime": 0.9940000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24",
|
||||
"line": 12,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testOther",
|
||||
"result": "failed",
|
||||
"time": 5.2509999999999994,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40",
|
||||
"line": 31,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testException",
|
||||
"result": "failed",
|
||||
"time": 0.751,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testEmpty",
|
||||
"result": "success",
|
||||
"time": 0.42700000000000005,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSwitchCaseNeedBreak",
|
||||
"result": "success",
|
||||
"time": 0.901,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OtherTest",
|
||||
"totalTime": 7.329,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.641,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testClosingTagNotNeeded",
|
||||
"result": "success",
|
||||
"time": 0.631,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPTagsTest",
|
||||
"totalTime": 1.272,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testProhibited",
|
||||
"result": "success",
|
||||
"time": 0.9380000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "ProhibitedTest",
|
||||
"totalTime": 0.9380000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testStrictCompare",
|
||||
"result": "success",
|
||||
"time": 1.578,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "StrictCompareTest",
|
||||
"totalTime": 1.578,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodUnused",
|
||||
"result": "success",
|
||||
"time": 0.94,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadUnused",
|
||||
"result": "success",
|
||||
"time": 0.895,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "UnusedTest",
|
||||
"totalTime": 1.835,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpunit test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/phpunit/phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.CollectingDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation",
|
||||
"result": "success",
|
||||
"time": 1.441,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsDispatchedEventsUntilFlushed",
|
||||
"result": "success",
|
||||
"time": 2.815,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\CollectingDispatcherTest",
|
||||
"totalTime": 4.256,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DeferringDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsEventsUntilFlush",
|
||||
"result": "success",
|
||||
"time": 1.6720000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFlushesCollectedEvents",
|
||||
"result": "success",
|
||||
"time": 0.661,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSubscriberCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.33399999999999996,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTracerCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.262,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DeferringDispatcherTest",
|
||||
"totalTime": 2.928,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DirectDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToKnownSubscribers",
|
||||
"result": "success",
|
||||
"time": 0.17,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToTracers",
|
||||
"result": "success",
|
||||
"time": 0.248,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testRegisterRejectsUnknownSubscriber",
|
||||
"result": "success",
|
||||
"time": 0.257,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchRejectsUnknownEventType",
|
||||
"result": "success",
|
||||
"time": 0.11900000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DirectDispatcherTest",
|
||||
"totalTime": 0.794,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-coversclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 68.151,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-usesclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 64.268,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CLI Arguments",
|
||||
"totalTime": 140.397,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
192
__tests__/__snapshots__/python-xunit.test.ts.snap
Normal file
192
__tests__/__snapshots__/python-xunit.test.ts.snap
Normal file
@@ -0,0 +1,192 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/python-xunit-pytest.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "tests.test_lib",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_pass",
|
||||
"result": "success",
|
||||
"time": 2,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_subtests",
|
||||
"result": "success",
|
||||
"time": 5,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized[param1]",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized[param2]",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_skip",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "def test_always_fail():
|
||||
> assert False
|
||||
E assert False
|
||||
|
||||
tests/test_lib.py:25: AssertionError
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "assert False",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_always_fail",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_expected_failure",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "def test_error():
|
||||
> raise Exception("error")
|
||||
E Exception: error
|
||||
|
||||
tests/test_lib.py:32: Exception
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "Exception: error",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_error",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_record_property",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "custom_classname",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_record_xml_attribute",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "pytest",
|
||||
"totalTime": 19,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/python-xunit-unittest.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "TestAcme",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_pass",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized_0_param1",
|
||||
"result": "success",
|
||||
"time": 1,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized_1_param2",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_subtests",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
|
||||
self.fail("failed")
|
||||
AssertionError: failed
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "AssertionError: failed",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_always_fail",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
|
||||
raise Exception("error")
|
||||
Exception: error
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "Exception: error",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_error",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_skip",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_expected_failure",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "TestAcme-20251114214921",
|
||||
"totalTime": 1,
|
||||
},
|
||||
],
|
||||
"totalTime": 1,
|
||||
}
|
||||
`;
|
||||
@@ -1,4 +1,4 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
|
||||
485
__tests__/__snapshots__/tester-junit.test.ts.snap
Normal file
@@ -0,0 +1,485 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`tester-junit tests parses complex test names from BootstrapFormRenderer-report.xml 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/BootstrapFormRenderer-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "KdybyTests/BootstrapFormRenderer",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingBasics",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingIndividual",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingComponents",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testMultipleFormsInTemplate",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "BootstrapFormRenderer-report.xml",
|
||||
"totalTime": 300,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`tester-junit tests report from tester-v1.7-report.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/tester-v1.7-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "tests/Framework",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.php7.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.contains.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.count.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testSimple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testMultiple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testDeep",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testCross",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testThirdParty",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.exception.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.false.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.regexp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.nan.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.noError.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.same.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.null.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.true.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.truthy.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.load.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.type.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.parseAnnotation.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.testQuery.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.css2Xpath.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromHtml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromXml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.dumpException.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.color.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.recursion.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "FileMock.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.escapeArg.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.parseDocComment.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.setUp.tearDown.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.syntax.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.basic.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.generator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidMethods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.errorMuted.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/CodeCoverage",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Collector.start.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.methods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CloverXMLGenerator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines-of-code.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.namespaces.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/Runner",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CommandLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "HhvmPhpInterpreter.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.find-tests.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Job.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "ZendPhpExecutable.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.multiple.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.stop-on-fail.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"line": undefined,
|
||||
"message": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "Runner.multiple-fails.phpt",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.annotations.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/RunnerOutput",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "JUnitPrinter.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "tester-v1.7-report.xml",
|
||||
"totalTime": 2100,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
@@ -39,15 +39,19 @@ describe('dotnet-trx tests', () => {
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('matches report snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx.md')
|
||||
it.each([['dotnet-trx'], ['dotnet-xunitv3']])('matches %s report snapshot', async reportName => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', `${reportName}.trx`)
|
||||
const outputPath = path.join(__dirname, '__outputs__', `${reportName}.md`)
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: ['DotnetTests.Unit/Calculator.cs', 'DotnetTests.XUnitTests/CalculatorTests.cs']
|
||||
trackedFiles: [
|
||||
'DotnetTests.Unit/Calculator.cs',
|
||||
'DotnetTests.XUnitTests/CalculatorTests.cs',
|
||||
'DotnetTests.XUnitV3Tests/FixtureTests.cs'
|
||||
]
|
||||
//workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
|
||||
}
|
||||
|
||||
|
||||
60
__tests__/fixtures/dotnet-xunitv3.trx
Normal file
@@ -0,0 +1,60 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<TestRun id="54e29175-539e-48a3-a634-3a1855a0ed38" name="@Asterix 2025-06-22 14:17:12.022" xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010">
|
||||
<Times creation="2025-06-22T14:17:11.756535Z" queuing="2025-06-22T14:17:11.756535Z" start="2025-06-22T14:17:11.756535Z" finish="2025-06-22T14:17:12.023063Z" />
|
||||
<TestSettings name="default" id="932e6c6f-3e5b-4392-ad65-e04c1ef476b5">
|
||||
<Deployment runDeploymentRoot="_Asterix_2025-06-22_14_17_12.022" />
|
||||
</TestSettings>
|
||||
<Results>
|
||||
<UnitTestResult executionId="37242a1f-ca3e-44b3-8142-71e510480975" testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" testName="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" computerName="Asterix" duration="00:00:00.0170545" startTime="2025-06-22T14:17:11.9339840+00:00" endTime="2025-06-22T14:17:11.9750850+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="37242a1f-ca3e-44b3-8142-71e510480975">
|
||||
<Output>
|
||||
<ErrorInfo>
|
||||
<Message>Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }</Message>
|
||||
<StackTrace> at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)</StackTrace>
|
||||
</ErrorInfo>
|
||||
</Output>
|
||||
</UnitTestResult>
|
||||
<UnitTestResult executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testId="3ee930dd-8a75-92a0-0d90-373833166db1" testName="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" computerName="Asterix" duration="00:00:00.0008786" startTime="2025-06-22T14:17:11.9819890+00:00" endTime="2025-06-22T14:17:11.9833560+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Passed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
|
||||
<UnitTestResult executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testId="372fb60f-1f5b-a52e-032e-41a7556021e8" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320280+00:00" endTime="2025-06-22T14:17:12.0320290+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
|
||||
<UnitTestResult executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320420+00:00" endTime="2025-06-22T14:17:12.0320430+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
|
||||
</Results>
|
||||
<TestDefinitions>
|
||||
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="f846a1e6-0b68-2ac6-9a66-f417926e3238">
|
||||
<Execution id="37242a1f-ca3e-44b3-8142-71e510480975" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" />
|
||||
</UnitTest>
|
||||
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="3ee930dd-8a75-92a0-0d90-373833166db1">
|
||||
<Execution id="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" />
|
||||
</UnitTest>
|
||||
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="372fb60f-1f5b-a52e-032e-41a7556021e8">
|
||||
<Execution id="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" />
|
||||
</UnitTest>
|
||||
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="a69083a1-56b4-3da3-2d7c-66fda374fd8e">
|
||||
<Execution id="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" />
|
||||
</UnitTest>
|
||||
</TestDefinitions>
|
||||
<TestEntries>
|
||||
<TestEntry testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" executionId="37242a1f-ca3e-44b3-8142-71e510480975" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="3ee930dd-8a75-92a0-0d90-373833166db1" executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="372fb60f-1f5b-a52e-032e-41a7556021e8" executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
</TestEntries>
|
||||
<TestLists>
|
||||
<TestList name="Results Not in a List" id="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestList name="All Loaded Results" id="19431567-8539-422a-85d7-44ee4e166bda" />
|
||||
</TestLists>
|
||||
<ResultSummary outcome="Failed">
|
||||
<Counters total="4" executed="4" passed="1" failed="3" error="0" timeout="0" aborted="0" inconclusive="0" passedButRunAborted="0" notRunnable="0" notExecuted="0" disconnected="0" warning="0" completed="0" inProgress="0" pending="0" />
|
||||
<RunInfos>
|
||||
<RunInfo computerName="Asterix" outcome="Error" timestamp="2025-06-22T14:17:12.033401">
|
||||
<Text>Exit code indicates failure: '2'. Please refer to https://aka.ms/testingplatform/exitcodes for more information.</Text>
|
||||
</RunInfo>
|
||||
</RunInfos>
|
||||
</ResultSummary>
|
||||
</TestRun>
|
||||
2
__tests__/fixtures/empty/phpunit-empty.xml
Normal file
@@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites/>
|
||||
31
__tests__/fixtures/external/java/junit4-basic.xml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a basic JUnit-style XML example to highlight the basis structure.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
<testsuites time="15.682687">
|
||||
<testsuite name="Tests.Registration" time="6.605871">
|
||||
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
|
||||
</testsuite>
|
||||
<testsuite name="Tests.Authentication" time="9.076816">
|
||||
<!-- Java JUnit4 XML files do not nest <testsuite> elements -->
|
||||
<!--
|
||||
<testsuite name="Tests.Authentication.Login" time="4.356">
|
||||
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
|
||||
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
|
||||
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
|
||||
</testsuite>
|
||||
-->
|
||||
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
|
||||
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
|
||||
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
|
||||
<failure message="Assertion error message" type="AssertionError">
|
||||
<!-- Call stack printed here -->
|
||||
</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
141
__tests__/fixtures/external/java/junit4-complete.xml
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a JUnit-style XML example with commonly used tags and attributes.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
|
||||
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
|
||||
the <testsuites> element if there is only a single top-level <testsuite> element (which
|
||||
is then used as the root element).
|
||||
|
||||
name Name of the entire test run
|
||||
tests Total number of tests in this file
|
||||
failures Total number of failed tests in this file
|
||||
errors Total number of errored tests in this file
|
||||
skipped Total number of skipped tests in this file
|
||||
assertions Total number of assertions for all tests in this file
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test run was executed (in ISO 8601 format)
|
||||
-->
|
||||
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
|
||||
|
||||
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
|
||||
There can be many test suites in an XML file, and there can be test suites under other
|
||||
test suites.
|
||||
|
||||
name Name of the test suite (e.g. class name or folder name)
|
||||
tests Total number of tests in this suite
|
||||
failures Total number of failed tests in this suite
|
||||
errors Total number of errored tests in this suite
|
||||
skipped Total number of skipped tests in this suite
|
||||
assertions Total number of assertions for all tests in this suite
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
|
||||
file Source code file of this test suite
|
||||
-->
|
||||
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
|
||||
file="tests/registration.code">
|
||||
|
||||
<!-- <properties> Test suites (and test cases, see below) can have additional
|
||||
properties such as environment variables or version numbers. -->
|
||||
<properties>
|
||||
<!-- <property> Each property has a name and value. Some tools also support
|
||||
properties with text values instead of value attributes. -->
|
||||
<property name="version" value="1.774" />
|
||||
<property name="commit" value="ef7bebf" />
|
||||
<property name="browser" value="Google Chrome" />
|
||||
<property name="ci" value="https://github.com/actions/runs/1234" />
|
||||
<property name="config">
|
||||
Config line #1
|
||||
Config line #2
|
||||
Config line #3
|
||||
</property>
|
||||
</properties>
|
||||
|
||||
<!-- <system-out> Optionally data written to standard out for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optionally data written to standard error for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
|
||||
<!-- <testcase> There are one or more test cases in a test suite. A test passed
|
||||
if there isn't an additional result element (skipped, failure, error).
|
||||
|
||||
name The name of this test case, often the method name
|
||||
classname The name of the parent class/folder, often the same as the suite's name
|
||||
assertions Number of assertions checked during test case execution
|
||||
time Execution time of the test in seconds
|
||||
file Source code file of this test case
|
||||
line Source code line number of the start of this test case
|
||||
-->
|
||||
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
|
||||
time="2.436" file="tests/registration.code" line="24" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
|
||||
time="1.534" file="tests/registration.code" line="62" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
|
||||
time="0.822" file="tests/registration.code" line="102" />
|
||||
|
||||
<!-- Example of a test case that was skipped -->
|
||||
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
|
||||
time="0" file="tests/registration.code" line="164">
|
||||
<!-- <skipped> Indicates that the test was not executed. Can have an optional
|
||||
message describing why the test was skipped. -->
|
||||
<skipped message="Test was skipped." />
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that failed. -->
|
||||
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
|
||||
time="2.902412" file="tests/registration.code" line="202">
|
||||
<!-- <failure> The test failed because one of the assertions/checks failed.
|
||||
Can have a message and failure type, often the assertion type or class. The text
|
||||
content of the element often includes the failure description or stack trace. -->
|
||||
<failure message="Expected value did not match." type="AssertionError">
|
||||
<!-- Failure description or stack trace -->
|
||||
</failure>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that had errors. -->
|
||||
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
|
||||
time="3.819" file="tests/registration.code" line="235">
|
||||
<!-- <error> The test had an unexpected error during execution. Can have a
|
||||
message and error type, often the exception type or class. The text
|
||||
content of the element often includes the error description or stack trace. -->
|
||||
<error message="Division by zero." type="ArithmeticError">
|
||||
<!-- Error description or stack trace -->
|
||||
</error>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with outputs. -->
|
||||
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
|
||||
time="2.944" file="tests/registration.code" line="287">
|
||||
<!-- <system-out> Optional data written to standard out for the test case. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optional data written to standard error for the test case. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with properties -->
|
||||
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
|
||||
time="1.625275" file="tests/registration.code" line="302">
|
||||
<!-- <properties> Some tools also support properties for test cases. -->
|
||||
<properties>
|
||||
<property name="priority" value="high" />
|
||||
<property name="language" value="english" />
|
||||
<property name="author" value="Adrian" />
|
||||
<property name="attachment" value="screenshots/dashboard.png" />
|
||||
<property name="attachment" value="screenshots/users.png" />
|
||||
<property name="description">
|
||||
This text describes the purpose of this test case and provides
|
||||
an overview of what the test does and how it works.
|
||||
</property>
|
||||
</properties>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
28
__tests__/fixtures/external/phpunit/junit-basic.xml
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a basic JUnit-style XML example to highlight the basis structure.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
<testsuites time="15.682687">
|
||||
<testsuite name="Tests.Registration" time="6.605871">
|
||||
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
|
||||
</testsuite>
|
||||
<testsuite name="Tests.Authentication" time="9.076816">
|
||||
<testsuite name="Tests.Authentication.Login" time="4.356">
|
||||
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
|
||||
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
|
||||
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
|
||||
</testsuite>
|
||||
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
|
||||
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
|
||||
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
|
||||
<failure message="Assertion error message" type="AssertionError">
|
||||
<!-- Call stack printed here -->
|
||||
</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
212
__tests__/fixtures/external/phpunit/phpcheckstyle-phpunit.xml
vendored
Normal file
@@ -0,0 +1,212 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="/workspace/phpcheckstyle/phpunit.xml" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="PHPUnitTestSuite" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="CommentsTest" file="/workspace/phpcheckstyle/test/CommentsTest.php" tests="3" assertions="12" errors="0" failures="0" skipped="0" time="0.006702">
|
||||
<testcase name="testGoodDoc" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="12" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.005093"/>
|
||||
<testcase name="testComments" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="30" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000921">
|
||||
<system-out>File "./test/sample/bad_comments.php" warning, line 4 - Avoid Shell/Perl like comments.
|
||||
File "./test/sample/bad_comments.php" warning, line 6 - The class Comments must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 10 - The function testComment must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment returns a value and must include @returns in its docblock.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment parameters must match those in its docblock @param.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment throws an exception and must include @throws in its docblock.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testTODOs" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="48" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000688">
|
||||
<system-out>File "./test/sample/todo.php" warning, line 3 - TODO: The todo message.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="DeprecationTest" file="/workspace/phpcheckstyle/test/DeprecationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000974">
|
||||
<testcase name="testDeprecations" file="/workspace/phpcheckstyle/test/DeprecationTest.php" line="12" class="DeprecationTest" classname="DeprecationTest" assertions="4" time="0.000974">
|
||||
<system-out>File "./test/sample/bad_deprecation.php" warning, line 17 - split is deprecated since PHP 5.3. explode($pattern, $string) or preg_split('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 19 - ereg is deprecated since PHP 5.3. preg_match('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 21 - session_register is deprecated since PHP 5.3. $_SESSION must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 23 - mysql_db_query is deprecated since PHP 5.3. mysql_select_db and mysql_query must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 25 - $HTTP_GET_VARS is deprecated since PHP 5.3. $_GET must be used instead.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="GoodTest" file="/workspace/phpcheckstyle/test/GoodTest.php" tests="4" assertions="16" errors="0" failures="0" skipped="0" time="0.005357">
|
||||
<testcase name="testGood" file="/workspace/phpcheckstyle/test/GoodTest.php" line="12" class="GoodTest" classname="GoodTest" assertions="4" time="0.002647"/>
|
||||
<testcase name="testDoWhile" file="/workspace/phpcheckstyle/test/GoodTest.php" line="32" class="GoodTest" classname="GoodTest" assertions="4" time="0.001022"/>
|
||||
<testcase name="testAnonymousFunction" file="/workspace/phpcheckstyle/test/GoodTest.php" line="50" class="GoodTest" classname="GoodTest" assertions="4" time="0.000800"/>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/GoodTest.php" line="68" class="GoodTest" classname="GoodTest" assertions="4" time="0.000888"/>
|
||||
</testsuite>
|
||||
<testsuite name="IndentationTest" file="/workspace/phpcheckstyle/test/IndentationTest.php" tests="8" assertions="32" errors="0" failures="0" skipped="0" time="0.007654">
|
||||
<testcase name="testTabIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="12" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000857">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 8 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - Whitespace indentation must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="30" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000929">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - The indentation level must be 8 but was 4.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - The indentation level must be 8 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - The indentation level must be 8 but was 3.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - The indentation level must be 8 but was 5.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - The indentation level must be 8 but was 6.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - The indentation level must be 4 but was 1.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="51" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000975">
|
||||
<system-out>File "./test/sample/bad_indentation_array.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 16 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 24 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 29 - The indentation level must be 8 but was 12.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 15 - Undeclared or unused variable: $aVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 19 - Undeclared or unused variable: $bVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 23 - Undeclared or unused variable: $cVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 27 - Undeclared or unused variable: $dVar.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testGoodSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="72" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001212"/>
|
||||
<testcase name="testGoodIndentationNewLine" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="93" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000859"/>
|
||||
<testcase name="testGoodIndentationSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="116" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000780"/>
|
||||
<testcase name="testBadSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="137" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001120">
|
||||
<system-out>File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must follow ,.
|
||||
File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must precede {.
|
||||
File "./test/sample/bad_spaces.php" warning, line 19 - Whitespace must follow if.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede +.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow +.
|
||||
File "./test/sample/bad_spaces.php" info, line 25 - Whitespace must not precede ,.
|
||||
File "./test/sample/bad_spaces.php" info, line 26 - Whitespace must not follow !.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testBadSpaceAfterControl" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="155" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000922">
|
||||
<system-out>File "./test/sample/bad_space_after_control.php" warning, line 19 - Whitespace must not follow if.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="MetricsTest" file="/workspace/phpcheckstyle/test/MetricsTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.004147">
|
||||
<testcase name="testMetrics" file="/workspace/phpcheckstyle/test/MetricsTest.php" line="12" class="MetricsTest" classname="MetricsTest" assertions="4" time="0.004147">
|
||||
<system-out>File "./test/sample/bad_metrics.php" warning, line 21 - The function testMetrics's number of parameters (6) must not exceed 4.
|
||||
File "./test/sample/bad_metrics.php" info, line 55 - Line is too long. [233/160]
|
||||
File "./test/sample/bad_metrics.php" warning, line 21 - The Cyclomatic Complexity of function testMetrics is too high. [15/10]
|
||||
File "./test/sample/bad_metrics.php" warning, line 244 - The testMetrics function body length is too long. [223/200]
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="NamingTest" file="/workspace/phpcheckstyle/test/NamingTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.002697">
|
||||
<testcase name="testNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="12" class="NamingTest" classname="NamingTest" assertions="4" time="0.001426">
|
||||
<system-out>File "./test/sample/_bad_naming.php" error, line 11 - Constant _badly_named_constant name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 13 - Constant bad_CONST name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 17 - Top level variable $XXX name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 20 - Variable x name length is too short.
|
||||
File "./test/sample/_bad_naming.php" error, line 28 - Class badlynamedclass name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 32 - Member variable $YYY name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 37 - The constructor name must be __construct().
|
||||
File "./test/sample/_bad_naming.php" error, line 44 - Function Badlynamedfunction name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 47 - Local variable $ZZZ name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 54 - Protected function Badlynamedfunction2 name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 61 - Private function badlynamedfunction3 name should follow the pattern /^_[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 70 - Interface _badlynamedinterface name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 75 - File _bad_naming.php name should follow the pattern /^[a-zA-Z][a-zA-Z0-9._]*$/.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testFunctionNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="32" class="NamingTest" classname="NamingTest" assertions="4" time="0.001271"/>
|
||||
</testsuite>
|
||||
<testsuite name="OptimizationTest" file="/workspace/phpcheckstyle/test/OptimizationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000994">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/OptimizationTest.php" line="12" class="OptimizationTest" classname="OptimizationTest" assertions="4" time="0.000994">
|
||||
<system-out>File "./test/sample/bad_optimisation.php" warning, line 18 - count function must not be used inside a loop.
|
||||
File "./test/sample/bad_optimisation.php" warning, line 23 - count function must not be used inside a loop.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="OtherTest" file="/workspace/phpcheckstyle/test/OtherTest.php" tests="4" assertions="13" errors="0" failures="2" skipped="0" time="0.007329">
|
||||
<testcase name="testOther" file="/workspace/phpcheckstyle/test/OtherTest.php" line="12" class="OtherTest" classname="OtherTest" assertions="4" time="0.005251">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24</failure>
|
||||
<system-out>File "./test/sample/bad_other.php" warning, line 17 - All arguments with default values must be at the end of the block or statement.
|
||||
File "./test/sample/bad_other.php" warning, line 21 - Errors must not be silenced when calling a function.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 37 - TODO: Show todos
|
||||
File "./test/sample/bad_other.php" warning, line 40 - Avoid empty statements (;;).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Boolean operators (&&) must be used instead of logical operators (AND).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Empty if block.
|
||||
File "./test/sample/bad_other.php" warning, line 48 - Heredoc syntax must not be used.
|
||||
File "./test/sample/bad_other.php" warning, line 52 - The statement if must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - The statement while must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 66 - The switch statement must have a default case.
|
||||
File "./test/sample/bad_other.php" warning, line 79 - The default case of a switch statement must be located after all other cases.
|
||||
File "./test/sample/bad_other.php" warning, line 93 - Unary operators (++ or --) must not be used inside a control statement
|
||||
File "./test/sample/bad_other.php" warning, line 95 - Assigments (=) must not be used inside a control statement.
|
||||
File "./test/sample/bad_other.php" warning, line 106 - File ./test/sample/bad_other.php must not have multiple class declarations.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/OtherTest.php" line="31" class="OtherTest" classname="OtherTest" assertions="1" time="0.000751">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40</failure>
|
||||
</testcase>
|
||||
<testcase name="testEmpty" file="/workspace/phpcheckstyle/test/OtherTest.php" line="50" class="OtherTest" classname="OtherTest" assertions="4" time="0.000427">
|
||||
<system-out>File "./test/sample/empty.php" warning, line 1 - The file ./test/sample/empty.php is empty.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSwitchCaseNeedBreak" file="/workspace/phpcheckstyle/test/OtherTest.php" line="69" class="OtherTest" classname="OtherTest" assertions="4" time="0.000901">
|
||||
<system-out>File "./test/sample/switch_multi_case.php" warning, line 10 - The case statement must contain a break.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="PHPTagsTest" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001272">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="12" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000641">
|
||||
<system-out>File "./test/sample/bad_php_tags_text_after_end.php" warning, line 9 - A PHP close tag must not be included at the end of the file.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testClosingTagNotNeeded" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="30" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000631">
|
||||
<system-out>File "./test/sample/bad_php_tags_end_not_needed.php" warning, line 1 - PHP tag should be at the beginning of the line.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="ProhibitedTest" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000938">
|
||||
<testcase name="testProhibited" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" line="13" class="ProhibitedTest" classname="ProhibitedTest" assertions="4" time="0.000938">
|
||||
<system-out>File "./test/sample/bad_prohibited.php" warning, line 18 - The function exec must not be called.
|
||||
File "./test/sample/bad_prohibited.php" warning, line 20 - Token T_PRINT must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="StrictCompareTest" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.001578">
|
||||
<testcase name="testStrictCompare" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" line="12" class="StrictCompareTest" classname="StrictCompareTest" assertions="4" time="0.001578">
|
||||
<system-out>File "./test/sample/bad_strictcompare.php" warning, line 14 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 19 - Consider using a strict comparison operator instead of !=.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 24 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 29 - Consider using a strict comparison operator instead of ==.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="UnusedTest" file="/workspace/phpcheckstyle/test/UnusedTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001835">
|
||||
<testcase name="testGoodUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="13" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000940"/>
|
||||
<testcase name="testBadUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="32" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000895">
|
||||
<system-out>File "./test/sample/bad_unused.php" warning, line 23 - Function _testUnused has unused code after RETURN.
|
||||
File "./test/sample/bad_unused.php" warning, line 27 - The function _testUnused parameter $b is not used.
|
||||
File "./test/sample/bad_unused.php" warning, line 18 - Unused private function: _testUnused.
|
||||
File "./test/sample/bad_unused.php" warning, line 20 - Undeclared or unused variable: $c.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
@@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="0" skipped="0" tests="4" time="0.3" timestamp="2026-01-01T16:36:10">
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
83
__tests__/fixtures/nette-tester/tester-v1.7-report.xml
Normal file
@@ -0,0 +1,83 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="1" skipped="3" tests="65" time="2.1" timestamp="2026-01-01T16:50:52">
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.php7.phpt" name="tests/Framework/Dumper.toPhp.php7.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/Collector.start.phpt" name="tests/CodeCoverage/Collector.start.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines.phpt" name="tests/CodeCoverage/PhpParser.parse.lines.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.methods.phpt" name="tests/CodeCoverage/PhpParser.parse.methods.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/CloverXMLGenerator.phpt" name="tests/CodeCoverage/CloverXMLGenerator.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.edge.phpt" name="tests/CodeCoverage/PhpParser.parse.edge.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt" name="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.namespaces.phpt" name="tests/CodeCoverage/PhpParser.parse.namespaces.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.contains.phpt" name="tests/Framework/Assert.contains.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.count.phpt" name="tests/Framework/Assert.count.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.phpt" name="tests/Framework/Assert.equal.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]" name="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testCross]" name="tests/Framework/Assert.equal.recursive.phpt [method=testCross]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]" name="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]"/>
|
||||
<testcase classname="tests/Framework/Assert.error.phpt" name="tests/Framework/Assert.error.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.exception.phpt" name="tests/Framework/Assert.exception.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.false.phpt" name="tests/Framework/Assert.false.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.phpt" name="tests/Framework/Assert.match.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.regexp.phpt" name="tests/Framework/Assert.match.regexp.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.nan.phpt" name="tests/Framework/Assert.nan.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.noError.phpt" name="tests/Framework/Assert.noError.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.same.phpt" name="tests/Framework/Assert.same.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.null.phpt" name="tests/Framework/Assert.null.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.true.phpt" name="tests/Framework/Assert.true.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.truthy.phpt" name="tests/Framework/Assert.truthy.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.load.phpt" name="tests/Framework/DataProvider.load.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.type.phpt" name="tests/Framework/Assert.type.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.parseAnnotation.phpt" name="tests/Framework/DataProvider.parseAnnotation.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.testQuery.phpt" name="tests/Framework/DataProvider.testQuery.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.css2Xpath.phpt" name="tests/Framework/DomQuery.css2Xpath.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromHtml.phpt" name="tests/Framework/DomQuery.fromHtml.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromXml.phpt" name="tests/Framework/DomQuery.fromXml.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.dumpException.phpt" name="tests/Framework/Dumper.dumpException.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.color.phpt" name="tests/Framework/Dumper.color.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toLine.phpt" name="tests/Framework/Dumper.toLine.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.recursion.phpt" name="tests/Framework/Dumper.toPhp.recursion.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.phpt" name="tests/Framework/Dumper.toPhp.phpt"/>
|
||||
<testcase classname="tests/Framework/FileMock.phpt" name="tests/Framework/FileMock.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.escapeArg.phpt" name="tests/Framework/Helpers.escapeArg.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.parseDocComment.phpt" name="tests/Framework/Helpers.parseDocComment.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.phpt" name="tests/Framework/TestCase.annotationThrows.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt" name="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.syntax.phpt" name="tests/Framework/TestCase.annotationThrows.syntax.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.basic.phpt" name="tests/Framework/TestCase.basic.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.generator.phpt" name="tests/Framework/TestCase.dataProvider.generator.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.phpt" name="tests/Framework/TestCase.dataProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidMethods.phpt" name="tests/Framework/TestCase.invalidMethods.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidProvider.phpt" name="tests/Framework/TestCase.invalidProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.error.phpt" name="tests/Framework/TestCase.order.error.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.errorMuted.phpt" name="tests/Framework/TestCase.order.errorMuted.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.phpt" name="tests/Framework/TestCase.order.phpt"/>
|
||||
<testcase classname="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt" name="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"/>
|
||||
<testcase classname="tests/Runner/CommandLine.phpt" name="tests/Runner/CommandLine.phpt"/>
|
||||
<testcase classname="tests/Runner/HhvmPhpInterpreter.phpt" name="tests/Runner/HhvmPhpInterpreter.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/Runner/Runner.find-tests.phpt" name="tests/Runner/Runner.find-tests.phpt"/>
|
||||
<testcase classname="tests/Runner/Job.phpt" name="tests/Runner/Job.phpt"/>
|
||||
<testcase classname="tests/Runner/ZendPhpExecutable.phpt" name="tests/Runner/ZendPhpExecutable.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple.phpt" name="tests/Runner/Runner.multiple.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.edge.phpt" name="tests/Runner/Runner.edge.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.stop-on-fail.phpt" name="tests/Runner/Runner.stop-on-fail.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple-fails.phpt" name="tests/Runner/Runner.multiple-fails.phpt">
|
||||
<failure message="Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()"/>
|
||||
</testcase>
|
||||
<testcase classname="tests/RunnerOutput/JUnitPrinter.phpt" name="tests/RunnerOutput/JUnitPrinter.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.annotations.phpt" name="tests/Runner/Runner.annotations.phpt"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
23
__tests__/fixtures/phpunit/phpunit-paths.xml
Normal file
@@ -0,0 +1,23 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="SampleSuite" tests="6" failures="6" time="0.006">
|
||||
<testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.001">
|
||||
<failure type="Exception" message="Boom">/home/runner/work/repo/src/Fake.php:42</failure>
|
||||
</testcase>
|
||||
<testcase name="testStringFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Other.php" line="10" time="0.001">
|
||||
<failure>/home/runner/work/repo/src/Other.php:10</failure>
|
||||
</testcase>
|
||||
<testcase name="testParenFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Paren.php" line="123" time="0.001">
|
||||
<failure>at /home/runner/work/repo/src/Paren.php(123)</failure>
|
||||
</testcase>
|
||||
<testcase name="testWindowsFailure" classname="SampleSuite" file="C:\repo\src\Win.php" line="77" time="0.001">
|
||||
<failure>C:\repo\src\Win.php:77</failure>
|
||||
</testcase>
|
||||
<testcase name="testWindowsParenFailure" classname="SampleSuite" file="C:\repo\src\WinParen.php" line="88" time="0.001">
|
||||
<failure>at C:\repo\src\WinParen.php(88)</failure>
|
||||
</testcase>
|
||||
<testcase name="testPhptFailure" classname="SampleSuite" file="/home/runner/work/repo/tests/Sample.phpt" line="12" time="0.001">
|
||||
<failure>/home/runner/work/repo/tests/Sample.phpt:12</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
79
__tests__/fixtures/phpunit/phpunit.xml
Normal file
@@ -0,0 +1,79 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="CLI Arguments" tests="12" assertions="12" errors="0" failures="2" skipped="0" time="0.140397">
|
||||
<testcase name="targeting-traits-with-coversclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt" assertions="1" time="0.068151">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
|
||||
</testcase>
|
||||
<testcase name="targeting-traits-with-usesclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt" assertions="1" time="0.064268">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
|
||||
</testcase>
|
||||
<testsuite name="PHPUnit\Event\CollectingDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" tests="2" assertions="2" errors="0" failures="0" skipped="0" time="0.004256">
|
||||
<testcase name="testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="20" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.001441"/>
|
||||
<testcase name="testCollectsDispatchedEventsUntilFlushed" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="27" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.002815"/>
|
||||
</testsuite>
|
||||
<testsuite name="PHPUnit\Event\DeferringDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.002928">
|
||||
<testcase name="testCollectsEventsUntilFlush" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="22" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.001672"/>
|
||||
<testcase name="testFlushesCollectedEvents" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="35" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000661"/>
|
||||
<testcase name="testSubscriberCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="53" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000334"/>
|
||||
<testcase name="testTracerCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="69" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000262"/>
|
||||
</testsuite>
|
||||
<testsuite name="PHPUnit\Event\DirectDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.000794">
|
||||
<testcase name="testDispatchesEventToKnownSubscribers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="24" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000170"/>
|
||||
<testcase name="testDispatchesEventToTracers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="43" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000248"/>
|
||||
<testcase name="testRegisterRejectsUnknownSubscriber" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="62" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000257"/>
|
||||
<testcase name="testDispatchRejectsUnknownEventType" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="73" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000119"/>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
42
__tests__/fixtures/python-xunit-pytest.xml
Normal file
@@ -0,0 +1,42 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<testsuites name="pytest tests">
|
||||
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
|
||||
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
|
||||
<properties>
|
||||
<property name="custom_prop" value="custom_val"/>
|
||||
</properties>
|
||||
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
|
||||
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
|
||||
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
|
||||
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
|
||||
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
|
||||
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
|
||||
</skipped>
|
||||
</testcase>
|
||||
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
|
||||
<failure message="assert False">def test_always_fail():
|
||||
> assert False
|
||||
E assert False
|
||||
|
||||
tests/test_lib.py:25: AssertionError
|
||||
</failure>
|
||||
</testcase>
|
||||
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
|
||||
<skipped type="pytest.xfail" message=""/>
|
||||
</testcase>
|
||||
<testcase classname="tests.test_lib" name="test_error" time="0.000">
|
||||
<failure message="Exception: error">def test_error():
|
||||
> raise Exception("error")
|
||||
E Exception: error
|
||||
|
||||
tests/test_lib.py:32: Exception
|
||||
</failure>
|
||||
</testcase>
|
||||
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
|
||||
<properties>
|
||||
<property name="example_key" value="1"/>
|
||||
</properties>
|
||||
</testcase>
|
||||
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
27
__tests__/fixtures/python-xunit-unittest.xml
Normal file
@@ -0,0 +1,27 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
|
||||
<testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
|
||||
<testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
|
||||
<testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
|
||||
<testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
|
||||
<testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
|
||||
<failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
|
||||
self.fail("failed")
|
||||
AssertionError: failed
|
||||
]]></failure>
|
||||
</testcase>
|
||||
<testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
|
||||
<error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
|
||||
raise Exception("error")
|
||||
Exception: error
|
||||
]]></error>
|
||||
</testcase>
|
||||
<testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
|
||||
<skipped type="skip" message="skipped"/>
|
||||
</testcase>
|
||||
<testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
|
||||
<skipped type="XFAIL" message="expected failure: (<class 'AssertionError'>, AssertionError('expected failure'), <traceback object at 0x100c125c0>)"/>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
@@ -73,6 +73,46 @@ describe('java-junit tests', () => {
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from testmo/junitxml basic example matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from testmo/junitxml complete example matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses empty failures in test results', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
|
||||
@@ -207,4 +207,143 @@ describe('jest-junit tests', () => {
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
|
||||
it('report can be collapsed when configured', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'always'
|
||||
})
|
||||
// Report should include collapsible details
|
||||
expect(report).toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).toContain('</details>')
|
||||
})
|
||||
|
||||
it('report is not collapsed when configured to never', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'never'
|
||||
})
|
||||
// Report should not include collapsible details
|
||||
expect(report).not.toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).not.toContain('</details>')
|
||||
})
|
||||
|
||||
it('report auto-collapses when all tests pass', async () => {
|
||||
// Test with a fixture that has all passing tests (no failures)
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify this fixture has no failures
|
||||
expect(result.failed).toBe(0)
|
||||
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'auto'
|
||||
})
|
||||
|
||||
// Should collapse when all tests pass
|
||||
expect(report).toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).toContain('</details>')
|
||||
})
|
||||
|
||||
it('report does not auto-collapse when tests fail', async () => {
|
||||
// Test with a fixture that has failing tests
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify this fixture has failures
|
||||
expect(result.failed).toBeGreaterThan(0)
|
||||
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'auto'
|
||||
})
|
||||
|
||||
// Should not collapse when there are failures
|
||||
expect(report).not.toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).not.toContain('</details>')
|
||||
})
|
||||
|
||||
it('report includes the short summary', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const shortSummary = '1 passed, 4 failed and 1 skipped'
|
||||
const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
|
||||
// Report should have the short summary as the first line
|
||||
expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title and short summary', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const shortSummary = '1 passed, 4 failed and 1 skipped'
|
||||
const report = getReport(
|
||||
[result],
|
||||
{
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
},
|
||||
shortSummary
|
||||
)
|
||||
// Report should have the title followed by the short summary as the first lines
|
||||
expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
|
||||
})
|
||||
})
|
||||
|
||||
347
__tests__/phpunit-junit.test.ts
Normal file
@@ -0,0 +1,347 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('phpunit-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from phpunit test results matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses nested test suites correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Should have 4 test suites (3 nested ones plus the parent with direct testcases)
|
||||
expect(result.suites.length).toBe(4)
|
||||
|
||||
// Verify suite names
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\CollectingDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DeferringDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DirectDispatcherTest')
|
||||
expect(suiteNames).toContain('CLI Arguments')
|
||||
|
||||
// Verify total test count
|
||||
expect(result.tests).toBe(12)
|
||||
expect(result.passed).toBe(10)
|
||||
expect(result.failed).toBe(2)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CLI Arguments suite which has failures
|
||||
const cliSuite = result.suites.find(s => s.name === 'CLI Arguments')
|
||||
expect(cliSuite).toBeDefined()
|
||||
|
||||
// Get the failed tests
|
||||
const failedTests = cliSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify error details are captured
|
||||
for (const test of failedTests) {
|
||||
expect(test.error).toBeDefined()
|
||||
expect(test.error!.details).toContain('Failed asserting that string matches format description')
|
||||
}
|
||||
})
|
||||
|
||||
it('maps absolute paths to tracked files for annotations', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit-paths.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: [
|
||||
'src/Fake.php',
|
||||
'src/Other.php',
|
||||
'src/Paren.php',
|
||||
'src/Win.php',
|
||||
'src/WinParen.php',
|
||||
'tests/Sample.phpt'
|
||||
]
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
const suite = result.suites.find(s => s.name === 'SampleSuite')
|
||||
expect(suite).toBeDefined()
|
||||
|
||||
const tests = suite!.groups.flatMap(g => g.tests)
|
||||
const fileFailure = tests.find(t => t.name === 'testFailure')
|
||||
expect(fileFailure).toBeDefined()
|
||||
expect(fileFailure!.error).toBeDefined()
|
||||
expect(fileFailure!.error!.path).toBe('src/Fake.php')
|
||||
expect(fileFailure!.error!.line).toBe(42)
|
||||
|
||||
const stringFailure = tests.find(t => t.name === 'testStringFailure')
|
||||
expect(stringFailure).toBeDefined()
|
||||
expect(stringFailure!.error).toBeDefined()
|
||||
expect(stringFailure!.error!.path).toBe('src/Other.php')
|
||||
expect(stringFailure!.error!.line).toBe(10)
|
||||
|
||||
const parenFailure = tests.find(t => t.name === 'testParenFailure')
|
||||
expect(parenFailure).toBeDefined()
|
||||
expect(parenFailure!.error).toBeDefined()
|
||||
expect(parenFailure!.error!.path).toBe('src/Paren.php')
|
||||
expect(parenFailure!.error!.line).toBe(123)
|
||||
|
||||
const windowsFailure = tests.find(t => t.name === 'testWindowsFailure')
|
||||
expect(windowsFailure).toBeDefined()
|
||||
expect(windowsFailure!.error).toBeDefined()
|
||||
expect(windowsFailure!.error!.path).toBe('src/Win.php')
|
||||
expect(windowsFailure!.error!.line).toBe(77)
|
||||
|
||||
const windowsParenFailure = tests.find(t => t.name === 'testWindowsParenFailure')
|
||||
expect(windowsParenFailure).toBeDefined()
|
||||
expect(windowsParenFailure!.error).toBeDefined()
|
||||
expect(windowsParenFailure!.error!.path).toBe('src/WinParen.php')
|
||||
expect(windowsParenFailure!.error!.line).toBe(88)
|
||||
|
||||
const phptFailure = tests.find(t => t.name === 'testPhptFailure')
|
||||
expect(phptFailure).toBeDefined()
|
||||
expect(phptFailure!.error).toBeDefined()
|
||||
expect(phptFailure!.error!.path).toBe('tests/Sample.phpt')
|
||||
expect(phptFailure!.error!.line).toBe(12)
|
||||
})
|
||||
|
||||
it('parses junit-basic.xml with nested suites and failure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts
|
||||
expect(result.tests).toBe(9)
|
||||
expect(result.passed).toBe(8)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify suites - should have Tests.Registration, Tests.Authentication.Login, and Tests.Authentication
|
||||
expect(result.suites.length).toBe(3)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('Tests.Registration')
|
||||
expect(suiteNames).toContain('Tests.Authentication.Login')
|
||||
expect(suiteNames).toContain('Tests.Authentication')
|
||||
|
||||
// Verify the Registration suite has 3 tests
|
||||
const registrationSuite = result.suites.find(s => s.name === 'Tests.Registration')
|
||||
expect(registrationSuite).toBeDefined()
|
||||
const registrationTests = registrationSuite!.groups.flatMap(g => g.tests)
|
||||
expect(registrationTests.length).toBe(3)
|
||||
|
||||
// Verify the Authentication suite has 3 direct tests (not counting nested suite)
|
||||
const authSuite = result.suites.find(s => s.name === 'Tests.Authentication')
|
||||
expect(authSuite).toBeDefined()
|
||||
const authTests = authSuite!.groups.flatMap(g => g.tests)
|
||||
expect(authTests.length).toBe(3)
|
||||
|
||||
// Verify the Login nested suite has 3 tests
|
||||
const loginSuite = result.suites.find(s => s.name === 'Tests.Authentication.Login')
|
||||
expect(loginSuite).toBeDefined()
|
||||
const loginTests = loginSuite!.groups.flatMap(g => g.tests)
|
||||
expect(loginTests.length).toBe(3)
|
||||
|
||||
// Verify failure is captured
|
||||
const failedTest = authTests.find(t => t.name === 'testCase9')
|
||||
expect(failedTest).toBeDefined()
|
||||
expect(failedTest!.result).toBe('failed')
|
||||
expect(failedTest!.error).toBeDefined()
|
||||
expect(failedTest!.error!.message).toBe('AssertionError: Assertion error message')
|
||||
})
|
||||
|
||||
it('parses phpcheckstyle-phpunit.xml with deeply nested suites', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from the XML: tests="30", failures="2"
|
||||
expect(result.tests).toBe(30)
|
||||
expect(result.passed).toBe(28)
|
||||
expect(result.failed).toBe(2)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify the number of test suites extracted (leaf suites with testcases)
|
||||
// CommentsTest, DeprecationTest, GoodTest, IndentationTest, MetricsTest,
|
||||
// NamingTest, OptimizationTest, OtherTest, PHPTagsTest, ProhibitedTest,
|
||||
// StrictCompareTest, UnusedTest = 12 suites
|
||||
expect(result.suites.length).toBe(12)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('CommentsTest')
|
||||
expect(suiteNames).toContain('GoodTest')
|
||||
expect(suiteNames).toContain('IndentationTest')
|
||||
expect(suiteNames).toContain('OtherTest')
|
||||
})
|
||||
|
||||
it('extracts test data from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CommentsTest suite
|
||||
const commentsSuite = result.suites.find(s => s.name === 'CommentsTest')
|
||||
expect(commentsSuite).toBeDefined()
|
||||
|
||||
// Verify tests are extracted correctly
|
||||
const tests = commentsSuite!.groups.flatMap(g => g.tests)
|
||||
expect(tests.length).toBe(3)
|
||||
|
||||
const testGoodDoc = tests.find(t => t.name === 'testGoodDoc')
|
||||
expect(testGoodDoc).toBeDefined()
|
||||
expect(testGoodDoc!.result).toBe('success')
|
||||
})
|
||||
|
||||
it('captures failure details from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the OtherTest suite which has failures
|
||||
const otherSuite = result.suites.find(s => s.name === 'OtherTest')
|
||||
expect(otherSuite).toBeDefined()
|
||||
|
||||
const failedTests = otherSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify failure details
|
||||
const testOther = failedTests.find(t => t.name === 'testOther')
|
||||
expect(testOther).toBeDefined()
|
||||
expect(testOther!.error).toBeDefined()
|
||||
expect(testOther!.error!.details).toContain('We expect 20 warnings')
|
||||
expect(testOther!.error!.details).toContain('Failed asserting that 19 matches expected 20')
|
||||
|
||||
const testException = failedTests.find(t => t.name === 'testException')
|
||||
expect(testException).toBeDefined()
|
||||
expect(testException!.error).toBeDefined()
|
||||
expect(testException!.error!.details).toContain('We expect 1 error')
|
||||
})
|
||||
|
||||
it('report from junit-basic.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-junit-basic-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from phpcheckstyle-phpunit.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-phpcheckstyle-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
})
|
||||
93
__tests__/python-xunit.test.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
const defaultOpts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
describe('python-xunit unittest report', () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')
|
||||
|
||||
it('report from python test results matches snapshot', async () => {
|
||||
const trackedFiles = ['tests/test_lib.py']
|
||||
const opts: ParseOptions = {
|
||||
...defaultOpts,
|
||||
trackedFiles
|
||||
}
|
||||
|
||||
const parser = new PythonXunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const parser = new PythonXunitParser(defaultOpts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const parser = new PythonXunitParser(defaultOpts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const parser = new PythonXunitParser(defaultOpts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
describe('python-xunit pytest report', () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')
|
||||
|
||||
it('report from python test results matches snapshot', async () => {
|
||||
const trackedFiles = ['tests/test_lib.py']
|
||||
const opts: ParseOptions = {
|
||||
...defaultOpts,
|
||||
trackedFiles
|
||||
}
|
||||
|
||||
const parser = new PythonXunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
})
|
||||
120
__tests__/report/get-report.test.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'
|
||||
|
||||
describe('getBadge', () => {
|
||||
describe('URI encoding with special characters', () => {
|
||||
it('generates correct URI with simple badge title', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'tests'
|
||||
}
|
||||
const badge = getBadge(5, 0, 1, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with single hyphen', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'unit-tests'
|
||||
}
|
||||
const badge = getBadge(3, 0, 0, options)
|
||||
// The hyphen in the badge title should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with multiple hyphens', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'integration-api-tests'
|
||||
}
|
||||
const badge = getBadge(10, 0, 0, options)
|
||||
// All hyphens in the title should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with multiple underscores', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'my_integration_test'
|
||||
}
|
||||
const badge = getBadge(10, 0, 0, options)
|
||||
// All underscores in the title should be encoded as __
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with version format containing hyphen', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'MariaDb 12.0-ubi database tests'
|
||||
}
|
||||
const badge = getBadge(1, 0, 0, options)
|
||||
// The hyphen in "12.0-ubi" should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with dots and hyphens', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'v1.2.3-beta-test'
|
||||
}
|
||||
const badge = getBadge(4, 1, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('preserves structural hyphens between label and message', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'test-suite'
|
||||
}
|
||||
const badge = getBadge(2, 3, 1, options)
|
||||
// The URI should have literal hyphens separating title-message-color
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('generates test outcome as color name for imgshields', () => {
|
||||
it('uses success color when all tests pass', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 0, 0, options)
|
||||
expect(badge).toContain('-success)')
|
||||
})
|
||||
|
||||
it('uses critical color when tests fail', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 0, options)
|
||||
expect(badge).toContain('-critical)')
|
||||
})
|
||||
|
||||
it('uses yellow color when no tests found', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(0, 0, 0, options)
|
||||
expect(badge).toContain('-yellow)')
|
||||
})
|
||||
})
|
||||
|
||||
describe('badge message composition', () => {
|
||||
it('includes only passed count when no failures or skips', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 0, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('includes passed and failed counts', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('includes passed, failed and skipped counts', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 1, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('uses "none" message when no tests', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(0, 0, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
})
|
||||
})
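The encoding behaviour exercised by the tests above (a literal `-` in a segment becomes `--`, a literal `_` becomes `__`, everything else is percent-encoded) follows the usual img.shields.io escaping rules for static badge segments. A minimal sketch of such an encoder is shown below; the name `encodeImgShieldsURIComponent` matches the call that appears later in this diff, but the body here is an illustration, not necessarily the action's actual implementation.

```ts
// Sketch of a shields.io-style segment encoder (illustrative only).
// In a static badge URL https://img.shields.io/badge/<label>-<message>-<color>,
// literal dashes and underscores inside a segment must be doubled so they are
// not mistaken for the structural separators between label, message and color.
export function encodeImgShieldsURIComponent(segment: string): string {
  return encodeURIComponent(
    segment
      .replace(/-/g, '--') // literal hyphen -> '--'
      .replace(/_/g, '__') // literal underscore -> '__'
  )
}
```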
|
||||
|
||||
224 __tests__/tester-junit.test.ts (new file)
@@ -0,0 +1,224 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {NetteTesterJunitParser} from '../src/parsers/tester-junit/tester-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('tester-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from tester-v1.7-report.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-v1.7-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses tester-v1.7-report.xml correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from XML: tests="65" errors="1" skipped="3"
|
||||
expect(result.tests).toBe(65)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.skipped).toBe(3)
|
||||
expect(result.passed).toBe(61)
|
||||
|
||||
// Verify suite name uses file name
|
||||
expect(result.suites.length).toBe(1)
|
||||
expect(result.suites[0].name).toBe('tester-v1.7-report.xml')
|
||||
})
|
||||
|
||||
it('groups tests by directory structure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Get all group names
|
||||
const groupNames = result.suites[0].groups.map(g => g.name)
|
||||
|
||||
// Verify expected directory groups exist
|
||||
expect(groupNames).toContain('tests/Framework')
|
||||
expect(groupNames).toContain('tests/CodeCoverage')
|
||||
expect(groupNames).toContain('tests/Runner')
|
||||
expect(groupNames).toContain('tests/RunnerOutput')
|
||||
})
|
||||
|
||||
it('parses test names with method suffixes correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the Framework group which has tests with method suffixes
|
||||
const frameworkGroup = result.suites[0].groups.find(g => g.name === 'tests/Framework')
|
||||
expect(frameworkGroup).toBeDefined()
|
||||
|
||||
// Find tests with method suffixes
|
||||
const testWithMethod = frameworkGroup!.tests.find(t => t.name.includes('::testSimple'))
|
||||
expect(testWithMethod).toBeDefined()
|
||||
expect(testWithMethod!.name).toBe('Assert.equal.recursive.phpt::testSimple')
|
||||
})
|
||||
|
||||
it('parses complex test names from BootstrapFormRenderer-report.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'BootstrapFormRenderer-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-bootstrap-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts: 4 tests, all passed
|
||||
expect(result.tests).toBe(4)
|
||||
expect(result.passed).toBe(4)
|
||||
expect(result.failed).toBe(0)
|
||||
expect(result.skipped).toBe(0)
|
||||
|
||||
// Verify suite name
|
||||
expect(result.suites[0].name).toBe('BootstrapFormRenderer-report.xml')
|
||||
|
||||
// All tests should have method names
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
expect(allTests.every(t => t.name.includes('::'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingBasics'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingIndividual'))).toBe(true)
|
||||
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the failed test
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const failedTests = allTests.filter(t => t.result === 'failed')
|
||||
|
||||
expect(failedTests.length).toBe(1)
|
||||
|
||||
// Verify error details are captured
|
||||
const failedTest = failedTests[0]
|
||||
expect(failedTest.error).toBeDefined()
|
||||
expect(failedTest.error!.details).toContain('Failed:')
|
||||
expect(failedTest.error!.details).toContain('multiple-fails')
|
||||
})
|
||||
|
||||
it('correctly identifies skipped tests', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find skipped tests
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const skippedTests = allTests.filter(t => t.result === 'skipped')
|
||||
|
||||
expect(skippedTests.length).toBe(3)
|
||||
|
||||
// Verify some known skipped tests
|
||||
expect(skippedTests.some(t => t.name.includes('Dumper.toPhp.php7.phpt'))).toBe(true)
|
||||
expect(skippedTests.some(t => t.name.includes('Collector.start.phpt'))).toBe(true)
|
||||
})
|
||||
|
||||
it('parses test with description prefix correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find test with description prefix
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
// The test name is generated from the basename, and the description is shown in parentheses
|
||||
const testWithDescription = allTests.find(t => t.name.includes('Prevent loop'))
|
||||
|
||||
expect(testWithDescription).toBeDefined()
|
||||
expect(testWithDescription!.name).toContain('Prevent loop')
|
||||
expect(testWithDescription!.name).toContain('TestCase.ownErrorHandler.phpt')
|
||||
})
|
||||
})
|
||||
19 action.yml
@@ -1,6 +1,5 @@
name: Test Reporter
description: |
Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
description: Displays test results from popular testing frameworks directly in GitHub
author: Michal Dorner <dorner.michal@gmail.com>
inputs:
artifact:
@@ -29,9 +28,13 @@ inputs:
- dotnet-nunit
- dotnet-trx
- flutter-json
- golang-json
- java-junit
- jest-junit
- mocha-json
- tester-junit
- phpunit-junit
- python-xunit
- rspec-json
- swift-xunit
required: true
@@ -68,6 +71,10 @@ inputs:
working-directory:
description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
required: false
report-title:
description: Title for the test report summary
required: false
default: ''
only-summary:
description: |
Allows you to generate only the summary.
@@ -85,6 +92,14 @@ inputs:
description: Customize badge title
required: false
default: 'tests'
collapsed:
description: |
Controls whether test report details are collapsed or expanded. Supported options:
- auto: Collapse only if all tests pass (default behavior)
- always: Always collapse the report details
- never: Always expand the report details
required: false
default: 'auto'
token:
description: GitHub Access Token
required: false
1466 dist/index.js (generated, vendored): diff suppressed because it is too large
80 dist/licenses.txt (generated, vendored)
@@ -1350,48 +1350,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
sax
|
||||
ISC
|
||||
The ISC License
|
||||
BlueOak-1.0.0
|
||||
# Blue Oak Model License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
Version 1.0.0
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
## Purpose
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
This license gives everyone as much permission to work with
|
||||
this software as possible, while protecting contributors
|
||||
from liability.
|
||||
|
||||
====
|
||||
## Acceptance
|
||||
|
||||
`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
|
||||
License, as follows:
|
||||
In order to receive this license, you must agree to its
|
||||
rules. The rules of this license are both obligations
|
||||
under that agreement and conditions to your license.
|
||||
You must not do anything with this software that triggers
|
||||
a rule that you cannot or will not follow.
|
||||
|
||||
Copyright Mathias Bynens <https://mathiasbynens.be/>
|
||||
## Copyright
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
Each contributor licenses you to do everything with this
|
||||
software that would otherwise infringe that contributor's
|
||||
copyright in it.
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
## Notices
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
You must ensure that everyone who gets a copy of
|
||||
any part of this software from you, with or without
|
||||
changes, also gets the text of this license or a link to
|
||||
<https://blueoakcouncil.org/license/1.0.0>.
|
||||
|
||||
## Excuse
|
||||
|
||||
If anyone notifies you in writing that you have not
|
||||
complied with [Notices](#notices), you can keep your
|
||||
license by taking all practical steps to comply within 30
|
||||
days after the notice. If you do not do so, your license
|
||||
ends immediately.
|
||||
|
||||
## Patent
|
||||
|
||||
Each contributor licenses you to do everything with this
|
||||
software that would otherwise infringe any patent claims
|
||||
they can license or become able to license.
|
||||
|
||||
## Reliability
|
||||
|
||||
No contributor can revoke this license.
|
||||
|
||||
## No Liability
|
||||
|
||||
***As far as the law allows, this software comes as is,
|
||||
without any warranty or condition, and no contributor
|
||||
will be liable to anyone for any damages related to this
|
||||
software or this license, under any kind of legal claim.***
|
||||
|
||||
|
||||
to-regex-range
|
||||
|
||||
3438 package-lock.json (generated): diff suppressed because it is too large
26 package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "test-reporter",
|
||||
"version": "2.1.0",
|
||||
"version": "2.5.0",
|
||||
"private": true,
|
||||
"description": "Presents test results from popular testing frameworks as Github check run",
|
||||
"main": "lib/main.js",
|
||||
@@ -16,6 +16,7 @@
|
||||
"all": "npm run build && npm run format && npm run lint && npm run package && npm test",
|
||||
"dart-fixture": "cd \"reports/dart\" && dart test --file-reporter=\"json:../../__tests__/fixtures/dart-json.json\"",
|
||||
"dotnet-fixture": "dotnet test reports/dotnet/DotnetTests.XUnitTests --logger \"trx;LogFileName=../../../../__tests__/fixtures/dotnet-trx.trx\"",
|
||||
"dotnet-xunitv3-fixture": "dotnet run --project reports/dotnet/DotnetTests.XUnitV3Tests/DotnetTests.XUnitV3Tests.csproj --report-trx --report-trx-filename dotnet-xunitv3.trx --results-directory __tests__/fixtures/",
|
||||
"dotnet-nunit-fixture": "nunit.exe reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll --result=__tests__/fixtures/dotnet-nunit.xml",
|
||||
"dotnet-nunit-legacy-fixture": "nunit-console.exe reports/dotnet-nunit-legacy/NUnitLegacy.sln --result=__tests__/fixtures/dotnet-nunit-legacy.xml",
|
||||
"golang-json-fixture": "go test -v -json -timeout 5s ./reports/go | tee __tests__/fixtures/golang-json.json",
|
||||
@@ -41,32 +42,35 @@
|
||||
"adm-zip": "^0.5.16",
|
||||
"fast-glob": "^3.3.3",
|
||||
"got": "^11.8.6",
|
||||
"picomatch": "^4.0.2",
|
||||
"picomatch": "^4.0.3",
|
||||
"xml2js": "^0.6.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@octokit/webhooks-types": "^7.6.1",
|
||||
"@types/adm-zip": "^0.5.7",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/node": "^20.19.2",
|
||||
"@types/picomatch": "^2.3.4",
|
||||
"@types/jest": "^30.0.0",
|
||||
"@types/node": "^20.19.23",
|
||||
"@types/picomatch": "^4.0.2",
|
||||
"@types/xml2js": "^0.4.14",
|
||||
"@typescript-eslint/eslint-plugin": "^7.18.0",
|
||||
"@typescript-eslint/parser": "^7.18.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"@vercel/ncc": "^0.38.4",
|
||||
"eol-converter-cli": "^1.1.0",
|
||||
"eslint": "^8.57.1",
|
||||
"eslint-import-resolver-typescript": "^3.10.1",
|
||||
"eslint-plugin-github": "^4.10.2",
|
||||
"eslint-plugin-import": "^2.32.0",
|
||||
"eslint-plugin-jest": "^28.14.0",
|
||||
"eslint-plugin-prettier": "^5.5.1",
|
||||
"jest": "^30.0.4",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"jest": "^30.2.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"js-yaml": "^4.1.1",
|
||||
"prettier": "^3.6.2",
|
||||
"ts-jest": "^29.4.0",
|
||||
"typescript": "^5.8.3"
|
||||
"ts-jest": "^29.4.5",
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
"overrides": {
|
||||
"sax": "^1.4.3"
|
||||
},
|
||||
"jest-junit": {
|
||||
"suiteName": "jest tests",
|
||||
|
||||
@@ -40,7 +40,7 @@ namespace DotnetTests.XUnitTests
|
||||
}
|
||||
|
||||
[Test]
|
||||
[Timeout(1)]
|
||||
[CancelAfter(1)]
|
||||
public void Timeout_Test()
|
||||
{
|
||||
Thread.Sleep(100);
|
||||
@@ -58,7 +58,7 @@ namespace DotnetTests.XUnitTests
|
||||
[TestCase(3)]
|
||||
public void Is_Even_Number(int i)
|
||||
{
|
||||
Assert.True(i % 2 == 0);
|
||||
Assert.That(i % 2 == 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
|
||||
<IsPackable>false</IsPackable>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<DeterministicSourcePaths>true</DeterministicSourcePaths>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="NUnit" Version="3.13.2" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
|
||||
<PackageReference Include="NUnit" Version="4.3.2" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netstandard2.0</TargetFramework>
|
||||
<DeterministicSourcePaths>true</DeterministicSourcePaths>
|
||||
</PropertyGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
|
||||
<IsPackable>false</IsPackable>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<DeterministicSourcePaths>true</DeterministicSourcePaths>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
|
||||
<PackageReference Include="xunit" Version="2.4.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.0" />
|
||||
<PackageReference Include="coverlet.collector" Version="1.2.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<OutputType>exe</OutputType>
|
||||
<DeterministicSourcePaths>true</DeterministicSourcePaths>
|
||||
<UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Testing.Extensions.TrxReport" Version="1.7.3" />
|
||||
<PackageReference Include="xunit.v3" Version="2.0.3" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
27 reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs (new file)
@@ -0,0 +1,27 @@
|
||||
using System;
|
||||
using Xunit;
|
||||
|
||||
namespace DotnetTests.XUnitV3Tests;
|
||||
|
||||
public sealed class Fixture : IDisposable
|
||||
{
|
||||
public void Dispose()
|
||||
{
|
||||
throw new InvalidOperationException("Failure during fixture disposal");
|
||||
}
|
||||
}
|
||||
|
||||
public class FixtureTests(Fixture fixture) : IClassFixture<Fixture>
|
||||
{
|
||||
[Fact]
|
||||
public void Passing_Test()
|
||||
{
|
||||
Assert.NotNull(fixture);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Failing_Test()
|
||||
{
|
||||
Assert.Null(fixture);
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DotnetTests.XUnitTests", "D
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.NUnitV3Tests", "DotnetTests.NUnitV3Tests\DotnetTests.NUnitV3Tests.csproj", "{81023ED7-56CB-47E9-86C5-9125A0873C55}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.XUnitV3Tests", "DotnetTests.XUnitV3Tests\DotnetTests.XUnitV3Tests.csproj", "{D35E65DC-62EF-4612-9FF3-66F5600BFB74}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -29,6 +31,10 @@ Global
|
||||
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
@@ -36,6 +42,7 @@ Global
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{F8607EDB-D25D-47AA-8132-38ACA242E845} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
|
||||
{81023ED7-56CB-47E9-86C5-9125A0873C55} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
|
||||
{D35E65DC-62EF-4612-9FF3-66F5600BFB74} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
|
||||
EndGlobalSection
|
||||
GlobalSection(ExtensibilityGlobals) = postSolution
|
||||
SolutionGuid = {6ED5543C-74AA-4B21-8050-943550F3F66E}
|
||||
|
||||
4826 reports/jest/package-lock.json (generated): diff suppressed because it is too large
1453 reports/mocha/package-lock.json (generated): diff suppressed because it is too large
@@ -9,6 +9,6 @@
|
||||
"author": "Michal Dorner <dorner.michal@gmail.com>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"mocha": "^8.3.0"
|
||||
"mocha": "^11.7.5"
|
||||
}
|
||||
}
|
||||
|
||||
45 src/main.ts
@@ -17,9 +17,11 @@ import {GolangJsonParser} from './parsers/golang-json/golang-json-parser'
|
||||
import {JavaJunitParser} from './parsers/java-junit/java-junit-parser'
|
||||
import {JestJunitParser} from './parsers/jest-junit/jest-junit-parser'
|
||||
import {MochaJsonParser} from './parsers/mocha-json/mocha-json-parser'
|
||||
import {PhpunitJunitParser} from './parsers/phpunit-junit/phpunit-junit-parser'
|
||||
import {PythonXunitParser} from './parsers/python-xunit/python-xunit-parser'
|
||||
import {RspecJsonParser} from './parsers/rspec-json/rspec-json-parser'
|
||||
import {SwiftXunitParser} from './parsers/swift-xunit/swift-xunit-parser'
|
||||
|
||||
import {NetteTesterJunitParser} from './parsers/tester-junit/tester-junit-parser'
|
||||
import {normalizeDirPath, normalizeFilePath} from './utils/path-utils'
|
||||
import {getCheckRunContext} from './utils/github-utils'
|
||||
|
||||
@@ -49,6 +51,7 @@ class TestReporter {
|
||||
readonly useActionsSummary = core.getInput('use-actions-summary', {required: false}) === 'true'
|
||||
readonly badgeTitle = core.getInput('badge-title', {required: false})
|
||||
readonly reportTitle = core.getInput('report-title', {required: false})
|
||||
readonly collapsed = core.getInput('collapsed', {required: false}) as 'auto' | 'always' | 'never'
|
||||
readonly token = core.getInput('token', {required: true})
|
||||
readonly octokit: InstanceType<typeof GitHub>
|
||||
readonly context = getCheckRunContext()
|
||||
@@ -66,6 +69,11 @@ class TestReporter {
|
||||
return
|
||||
}
|
||||
|
||||
if (this.collapsed !== 'auto' && this.collapsed !== 'always' && this.collapsed !== 'never') {
|
||||
core.setFailed(`Input parameter 'collapsed' has invalid value`)
|
||||
return
|
||||
}
|
||||
|
||||
if (isNaN(this.maxAnnotations) || this.maxAnnotations < 0 || this.maxAnnotations > 50) {
|
||||
core.setFailed(`Input parameter 'max-annotations' has invalid value`)
|
||||
return
|
||||
@@ -166,7 +174,7 @@ class TestReporter {
|
||||
}
|
||||
}
|
||||
|
||||
const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle} = this
|
||||
const {listSuites, listTests, onlySummary, useActionsSummary, badgeTitle, reportTitle, collapsed} = this
|
||||
|
||||
const passed = results.reduce((sum, tr) => sum + tr.passed, 0)
|
||||
const failed = results.reduce((sum, tr) => sum + tr.failed, 0)
|
||||
@@ -175,19 +183,23 @@ class TestReporter {
|
||||
|
||||
let baseUrl = ''
|
||||
if (this.useActionsSummary) {
|
||||
const summary = getReport(results, {
|
||||
listSuites,
|
||||
listTests,
|
||||
baseUrl,
|
||||
onlySummary,
|
||||
useActionsSummary,
|
||||
badgeTitle,
|
||||
reportTitle
|
||||
})
|
||||
const summary = getReport(
|
||||
results,
|
||||
{
|
||||
listSuites,
|
||||
listTests,
|
||||
baseUrl,
|
||||
onlySummary,
|
||||
useActionsSummary,
|
||||
badgeTitle,
|
||||
reportTitle,
|
||||
collapsed
|
||||
},
|
||||
shortSummary
|
||||
)
|
||||
|
||||
core.info('Summary content:')
|
||||
core.info(summary)
|
||||
core.summary.addRaw(`# ${shortSummary}`)
|
||||
await core.summary.addRaw(summary).write()
|
||||
} else {
|
||||
core.info(`Creating check run ${name}`)
|
||||
@@ -211,7 +223,8 @@ class TestReporter {
|
||||
onlySummary,
|
||||
useActionsSummary,
|
||||
badgeTitle,
|
||||
reportTitle
|
||||
reportTitle,
|
||||
collapsed
|
||||
})
|
||||
|
||||
core.info('Creating annotations')
|
||||
@@ -260,10 +273,16 @@ class TestReporter {
|
||||
return new JestJunitParser(options)
|
||||
case 'mocha-json':
|
||||
return new MochaJsonParser(options)
|
||||
case 'phpunit-junit':
|
||||
return new PhpunitJunitParser(options)
|
||||
case 'python-xunit':
|
||||
return new PythonXunitParser(options)
|
||||
case 'rspec-json':
|
||||
return new RspecJsonParser(options)
|
||||
case 'swift-xunit':
|
||||
return new SwiftXunitParser(options)
|
||||
case 'tester-junit':
|
||||
return new NetteTesterJunitParser(options)
|
||||
default:
|
||||
throw new Error(`Input variable 'reporter' is set to invalid value '${reporter}'`)
|
||||
}
|
||||
|
||||
@@ -242,13 +242,13 @@ export class DartJsonParser implements TestParser {
|
||||
private getRelativePath(path: string): string {
|
||||
const prefix = 'file://'
|
||||
if (path.startsWith(prefix)) {
|
||||
path = path.substr(prefix.length)
|
||||
path = path.substring(prefix.length)
|
||||
}
|
||||
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substr(workDir.length)
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -77,13 +77,13 @@ export class DotnetNunitParser implements TestParser {
|
||||
.join('.')
|
||||
const groupName = suitesWithoutTheories[suitesWithoutTheories.length - 1].$.name
|
||||
|
||||
let existingSuite = result.find(existingSuite => existingSuite.name === suiteName)
|
||||
let existingSuite = result.find(suite => suite.name === suiteName)
|
||||
if (existingSuite === undefined) {
|
||||
existingSuite = new TestSuiteResult(suiteName, [])
|
||||
result.push(existingSuite)
|
||||
}
|
||||
|
||||
let existingGroup = existingSuite.groups.find(existingGroup => existingGroup.name === groupName)
|
||||
let existingGroup = existingSuite.groups.find(group => group.name === groupName)
|
||||
if (existingGroup === undefined) {
|
||||
existingGroup = new TestGroupResult(groupName, [])
|
||||
existingSuite.groups.push(existingGroup)
|
||||
@@ -136,7 +136,7 @@ export class DotnetNunitParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substr(workDir.length)
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -81,7 +81,7 @@ export class DotnetTrxParser implements TestParser {
|
||||
|
||||
const testClasses: {[name: string]: TestClass} = {}
|
||||
for (const r of unitTestsResults) {
|
||||
const className = r.test.TestMethod[0].$.className
|
||||
const className = r.test.TestMethod[0].$.className ?? "Unclassified"
|
||||
let tc = testClasses[className]
|
||||
if (tc === undefined) {
|
||||
tc = new TestClass(className)
|
||||
@@ -94,7 +94,7 @@ export class DotnetTrxParser implements TestParser {
|
||||
const resultTestName = r.result.$.testName
|
||||
const testName =
|
||||
resultTestName.startsWith(className) && resultTestName[className.length] === '.'
|
||||
? resultTestName.substr(className.length + 1)
|
||||
? resultTestName.substring(className.length + 1)
|
||||
: resultTestName
|
||||
|
||||
const test = new Test(testName, r.result.$.outcome, duration, error)
|
||||
@@ -177,7 +177,7 @@ export class DotnetTrxParser implements TestParser {
|
||||
const filePath = normalizeFilePath(fileStr)
|
||||
const workDir = this.getWorkDir(filePath)
|
||||
if (workDir) {
|
||||
const file = filePath.substr(workDir.length)
|
||||
const file = filePath.substring(workDir.length)
|
||||
if (trackedFiles.includes(file)) {
|
||||
const line = parseInt(lineStr)
|
||||
return {path: file, line}
|
||||
|
||||
@@ -106,7 +106,7 @@ export class JestJunitParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substr(workDir.length)
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ export class MochaJsonParser implements TestParser {
|
||||
private processTest(suite: TestSuiteResult, test: MochaJsonTest, result: TestExecutionResult): void {
|
||||
const groupName =
|
||||
test.fullTitle !== test.title
|
||||
? test.fullTitle.substr(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
? test.fullTitle.substring(0, test.fullTitle.length - test.title.length).trimEnd()
|
||||
: null
|
||||
|
||||
let group = suite.groups.find(grp => grp.name === groupName)
|
||||
@@ -103,7 +103,7 @@ export class MochaJsonParser implements TestParser {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substr(workDir.length)
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
258 src/parsers/phpunit-junit/phpunit-junit-parser.ts (new file)
@@ -0,0 +1,258 @@
|
||||
import {ParseOptions, TestParser} from '../../test-parser'
|
||||
import {parseStringPromise} from 'xml2js'
|
||||
|
||||
import {PhpunitReport, SingleSuiteReport, TestCase, TestSuite} from './phpunit-junit-types'
|
||||
import {getBasePath, normalizeFilePath} from '../../utils/path-utils'
|
||||
|
||||
import {
|
||||
TestExecutionResult,
|
||||
TestRunResult,
|
||||
TestSuiteResult,
|
||||
TestGroupResult,
|
||||
TestCaseResult,
|
||||
TestCaseError
|
||||
} from '../../test-results'
|
||||
|
||||
export class PhpunitJunitParser implements TestParser {
|
||||
readonly trackedFiles: Set<string>
|
||||
readonly trackedFilesList: string[]
|
||||
private assumedWorkDir: string | undefined
|
||||
|
||||
constructor(readonly options: ParseOptions) {
|
||||
this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
|
||||
this.trackedFiles = new Set(this.trackedFilesList)
|
||||
}
|
||||
|
||||
async parse(filePath: string, content: string): Promise<TestRunResult> {
|
||||
const reportOrSuite = await this.getPhpunitReport(filePath, content)
|
||||
const isReport = (reportOrSuite as PhpunitReport).testsuites !== undefined
|
||||
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report: PhpunitReport
|
||||
if (isReport) {
|
||||
report = reportOrSuite as PhpunitReport
|
||||
} else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = (reportOrSuite as SingleSuiteReport).testsuite
|
||||
report = {
|
||||
testsuites: {
|
||||
$: {time: suite.$.time},
|
||||
testsuite: [suite]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.getTestRunResult(filePath, report)
|
||||
}
|
||||
|
||||
private async getPhpunitReport(filePath: string, content: string): Promise<PhpunitReport | SingleSuiteReport> {
|
||||
try {
|
||||
return await parseStringPromise(content)
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
private getTestRunResult(filePath: string, report: PhpunitReport): TestRunResult {
|
||||
const suites: TestSuiteResult[] = []
|
||||
this.collectSuites(suites, report.testsuites.testsuite ?? [])
|
||||
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '')
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000
|
||||
return new TestRunResult(filePath, suites, time)
|
||||
}
|
||||
|
||||
private collectSuites(results: TestSuiteResult[], testsuites: TestSuite[]): void {
|
||||
for (const ts of testsuites) {
|
||||
// Recursively process nested test suites first (depth-first)
|
||||
if (ts.testsuite) {
|
||||
this.collectSuites(results, ts.testsuite)
|
||||
}
|
||||
|
||||
// Only add suites that have direct test cases
|
||||
// This avoids adding container suites that only hold nested suites
|
||||
if (ts.testcase && ts.testcase.length > 0) {
|
||||
const name = ts.$.name.trim()
|
||||
const time = parseFloat(ts.$.time) * 1000
|
||||
results.push(new TestSuiteResult(name, this.getGroups(ts), time))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private getGroups(suite: TestSuite): TestGroupResult[] {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
const groups: {name: string; tests: TestCase[]}[] = []
|
||||
for (const tc of suite.testcase) {
|
||||
// Use classname (PHPUnit style) for grouping
|
||||
// If classname matches suite name, use empty string to avoid redundancy
|
||||
const className = tc.$.classname ?? tc.$.class ?? ''
|
||||
const groupName = className === suite.$.name ? '' : className
|
||||
let grp = groups.find(g => g.name === groupName)
|
||||
if (grp === undefined) {
|
||||
grp = {name: groupName, tests: []}
|
||||
groups.push(grp)
|
||||
}
|
||||
grp.tests.push(tc)
|
||||
}
|
||||
|
||||
return groups.map(grp => {
|
||||
const tests = grp.tests.map(tc => {
|
||||
const name = tc.$.name.trim()
|
||||
const result = this.getTestCaseResult(tc)
|
||||
const time = parseFloat(tc.$.time) * 1000
|
||||
const error = this.getTestCaseError(tc)
|
||||
return new TestCaseResult(name, result, time, error)
|
||||
})
|
||||
return new TestGroupResult(grp.name, tests)
|
||||
})
|
||||
}
|
||||
|
||||
private getTestCaseResult(test: TestCase): TestExecutionResult {
|
||||
if (test.failure || test.error) return 'failed'
|
||||
if (test.skipped) return 'skipped'
|
||||
return 'success'
|
||||
}
|
||||
|
||||
private getTestCaseError(tc: TestCase): TestCaseError | undefined {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const failure = failures[0]
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? ''
|
||||
|
||||
// PHPUnit provides file path directly in testcase attributes
|
||||
let filePath: string | undefined
|
||||
let line: number | undefined
|
||||
|
||||
if (tc.$.file) {
|
||||
const relativePath = this.getRelativePath(tc.$.file)
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
filePath = relativePath
|
||||
}
|
||||
if (tc.$.line) {
|
||||
line = parseInt(tc.$.line)
|
||||
}
|
||||
}
|
||||
|
||||
// If file not in tracked files, try to extract from error details
|
||||
if (!filePath && details) {
|
||||
const extracted = this.extractFileAndLine(details)
|
||||
if (extracted) {
|
||||
filePath = extracted.filePath
|
||||
line = extracted.line
|
||||
}
|
||||
}
|
||||
|
||||
let message: string | undefined
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
path: filePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
|
||||
// PHPUnit stack traces typically have format: /path/to/file.php:123
|
||||
const lines = details.split(/\r?\n/)
|
||||
|
||||
for (const str of lines) {
|
||||
// Match patterns like /path/to/file.php:123 or at /path/to/file.php(123)
|
||||
const matchColon = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
|
||||
if (matchColon) {
|
||||
const relativePath = this.getRelativePath(matchColon[1])
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return {filePath: relativePath, line: parseInt(matchColon[2])}
|
||||
}
|
||||
}
|
||||
|
||||
const matchParen = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt))\((\d+)\)/)
|
||||
if (matchParen) {
|
||||
const relativePath = this.getRelativePath(matchParen[1])
|
||||
if (this.trackedFiles.has(relativePath)) {
|
||||
return {filePath: relativePath, line: parseInt(matchParen[2])}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an absolute file path to a relative path by stripping the working directory prefix.
|
||||
*
|
||||
* @param path - The absolute file path from PHPUnit output (e.g., `/home/runner/work/repo/src/Test.php`)
|
||||
* @returns The relative path (e.g., `src/Test.php`) if a working directory can be determined,
|
||||
* otherwise returns the normalized original path
|
||||
*/
|
||||
private getRelativePath(path: string): string {
|
||||
path = normalizeFilePath(path)
|
||||
const workDir = this.getWorkDir(path)
|
||||
if (workDir !== undefined && path.startsWith(workDir)) {
|
||||
path = path.substring(workDir.length)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the working directory prefix to strip from absolute file paths.
|
||||
*
|
||||
* The working directory is resolved using the following priority:
|
||||
*
|
||||
* 1. **Explicit configuration** - If `options.workDir` is set, it takes precedence.
|
||||
* This allows users to explicitly specify the working directory.
|
||||
*
|
||||
* 2. **Cached assumption** - If we've previously determined a working directory
|
||||
* (`assumedWorkDir`) and the current path starts with it, we reuse that value.
|
||||
* This avoids redundant computation for subsequent paths.
|
||||
*
|
||||
* 3. **Heuristic detection** - Uses `getBasePath()` to find the common prefix between
|
||||
* the absolute path and the list of tracked files in the repository. For example:
|
||||
* - Absolute path: `/home/runner/work/repo/src/Test.php`
|
||||
* - Tracked file: `src/Test.php`
|
||||
* - Detected workDir: `/home/runner/work/repo/`
|
||||
*
|
||||
* Once detected, the working directory is cached in `assumedWorkDir` for efficiency.
|
||||
*
|
||||
* @param path - The normalized absolute file path to analyze
|
||||
* @returns The working directory prefix (with trailing slash), or `undefined` if it cannot be determined
|
||||
*
|
||||
* @example
|
||||
* // With tracked file 'src/Foo.php' and path '/home/runner/work/repo/src/Foo.php'
|
||||
* // Returns: '/home/runner/work/repo/'
|
||||
*/
|
||||
private getWorkDir(path: string): string | undefined {
|
||||
if (this.options.workDir) {
|
||||
return this.options.workDir
|
||||
}
|
||||
|
||||
if (this.assumedWorkDir && path.startsWith(this.assumedWorkDir)) {
|
||||
return this.assumedWorkDir
|
||||
}
|
||||
|
||||
const basePath = getBasePath(path, this.trackedFilesList)
|
||||
if (basePath !== undefined) {
|
||||
this.assumedWorkDir = basePath
|
||||
}
|
||||
return basePath
|
||||
}
|
||||
}
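The `getWorkDir` heuristic documented above relies on `getBasePath` from `path-utils`, whose body is not part of this diff. The sketch below shows what such a common-prefix lookup could look like, under the assumption that it simply finds the tracked file that forms a suffix of the absolute path; the function name and behaviour here are assumptions for illustration, not the actual utility.

```ts
// Illustrative sketch: find the prefix of `filePath` that remains after
// stripping the longest tracked file that `filePath` ends with.
// E.g. filePath '/home/runner/work/repo/src/Foo.php' and tracked file
// 'src/Foo.php' yield '/home/runner/work/repo/'.
function getBasePathSketch(filePath: string, trackedFiles: string[]): string | undefined {
  let longest: string | undefined
  for (const tracked of trackedFiles) {
    if (filePath.endsWith(`/${tracked}`) && (longest === undefined || tracked.length > longest.length)) {
      longest = tracked
    }
  }
  return longest === undefined
    ? undefined
    : filePath.substring(0, filePath.length - longest.length)
}
```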
|
||||
52 src/parsers/phpunit-junit/phpunit-junit-types.ts (new file)
@@ -0,0 +1,52 @@
|
||||
export interface PhpunitReport {
|
||||
testsuites: TestSuites
|
||||
}
|
||||
|
||||
export interface SingleSuiteReport {
|
||||
testsuite: TestSuite
|
||||
}
|
||||
|
||||
export interface TestSuites {
|
||||
$?: {
|
||||
time?: string
|
||||
}
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestSuite {
|
||||
$: {
|
||||
name: string
|
||||
tests?: string
|
||||
assertions?: string
|
||||
errors?: string
|
||||
failures?: string
|
||||
skipped?: string
|
||||
time: string
|
||||
file?: string
|
||||
}
|
||||
testcase?: TestCase[]
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestCase {
|
||||
$: {
|
||||
name: string
|
||||
class?: string
|
||||
classname?: string
|
||||
file?: string
|
||||
line?: string
|
||||
assertions?: string
|
||||
time: string
|
||||
}
|
||||
failure?: Failure[]
|
||||
error?: Failure[]
|
||||
skipped?: string[]
|
||||
}
|
||||
|
||||
export interface Failure {
|
||||
_: string
|
||||
$?: {
|
||||
type?: string
|
||||
message?: string
|
||||
}
|
||||
}
|
||||
8 src/parsers/python-xunit/python-xunit-parser.ts (new file)
@@ -0,0 +1,8 @@
|
||||
import {ParseOptions} from '../../test-parser'
|
||||
import {JavaJunitParser} from '../java-junit/java-junit-parser'
|
||||
|
||||
export class PythonXunitParser extends JavaJunitParser {
|
||||
constructor(readonly options: ParseOptions) {
|
||||
super(options)
|
||||
}
|
||||
}
|
||||
@@ -55,7 +55,7 @@ export class RspecJsonParser implements TestParser {
|
||||
private processTest(suite: TestSuiteResult, test: RspecExample, result: TestExecutionResult): void {
|
||||
const groupName =
|
||||
test.full_description !== test.description
|
||||
? test.full_description.substr(0, test.full_description.length - test.description.length).trimEnd()
|
||||
? test.full_description.substring(0, test.full_description.length - test.description.length).trimEnd()
|
||||
: null
|
||||
|
||||
let group = suite.groups.find(grp => grp.name === groupName)
|
||||
|
||||
260 src/parsers/tester-junit/tester-junit-parser.ts (new file)
@@ -0,0 +1,260 @@
|
||||
import * as path from 'path'
|
||||
import {ParseOptions, TestParser} from '../../test-parser'
|
||||
import {parseStringPromise} from 'xml2js'
|
||||
|
||||
import {NetteTesterReport, SingleSuiteReport, TestCase, TestSuite} from './tester-junit-types'
|
||||
import {normalizeFilePath} from '../../utils/path-utils'
|
||||
|
||||
import {
|
||||
TestExecutionResult,
|
||||
TestRunResult,
|
||||
TestSuiteResult,
|
||||
TestGroupResult,
|
||||
TestCaseResult,
|
||||
TestCaseError
|
||||
} from '../../test-results'
|
||||
|
||||
interface ParsedTestName {
|
||||
filePath: string
|
||||
method?: string
|
||||
description?: string
|
||||
className?: string
|
||||
displayName: string
|
||||
}
|
||||
|
||||
export class NetteTesterJunitParser implements TestParser {
|
||||
readonly trackedFiles: Set<string>
|
||||
readonly trackedFilesList: string[]
|
||||
|
||||
constructor(readonly options: ParseOptions) {
|
||||
this.trackedFilesList = options.trackedFiles.map(f => normalizeFilePath(f))
|
||||
this.trackedFiles = new Set(this.trackedFilesList)
|
||||
}
|
||||
|
||||
async parse(filePath: string, content: string): Promise<TestRunResult> {
|
||||
const reportOrSuite = await this.getNetteTesterReport(filePath, content)
|
||||
const isReport = (reportOrSuite as NetteTesterReport).testsuites !== undefined
|
||||
|
||||
// XML might contain:
|
||||
// - multiple suites under <testsuites> root node
|
||||
// - single <testsuite> as root node
|
||||
let report: NetteTesterReport
|
||||
if (isReport) {
|
||||
report = reportOrSuite as NetteTesterReport
|
||||
} else {
|
||||
// Make it behave the same way as if suite was inside <testsuites> root node
|
||||
const suite = (reportOrSuite as SingleSuiteReport).testsuite
|
||||
report = {
|
||||
testsuites: {
|
||||
$: {time: suite.$.time},
|
||||
testsuite: [suite]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.getTestRunResult(filePath, report)
|
||||
}
|
||||
|
||||
private async getNetteTesterReport(
|
||||
filePath: string,
|
||||
content: string
|
||||
): Promise<NetteTesterReport | SingleSuiteReport> {
|
||||
try {
|
||||
return await parseStringPromise(content)
|
||||
} catch (e) {
|
||||
throw new Error(`Invalid XML at ${filePath}\n\n${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
private getTestRunResult(filePath: string, report: NetteTesterReport): TestRunResult {
|
||||
const suites =
|
||||
report.testsuites.testsuite === undefined
|
||||
? []
|
||||
: report.testsuites.testsuite.map((ts, index) => {
|
||||
// Use report file name as suite name (user preference)
|
||||
const fileName = path.basename(filePath)
|
||||
// If there are multiple test suites, add index to distinguish them
|
||||
const name =
|
||||
report.testsuites.testsuite && report.testsuites.testsuite.length > 1
|
||||
? `${fileName} #${index + 1}`
|
||||
: fileName
|
||||
const time = parseFloat(ts.$.time) * 1000
|
||||
const sr = new TestSuiteResult(name, this.getGroups(ts), time)
|
||||
return sr
|
||||
})
|
||||
|
||||
const seconds = parseFloat(report.testsuites.$?.time ?? '')
|
||||
const time = isNaN(seconds) ? undefined : seconds * 1000
|
||||
return new TestRunResult(filePath, suites, time)
|
||||
}
|
||||
|
||||
private getGroups(suite: TestSuite): TestGroupResult[] {
|
||||
if (!suite.testcase || suite.testcase.length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Group tests by directory structure
|
||||
const groups: Map<string, TestCase[]> = new Map()
|
||||
|
||||
for (const tc of suite.testcase) {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname)
|
||||
const directory = path.dirname(parsed.filePath)
|
||||
|
||||
if (!groups.has(directory)) {
|
||||
groups.set(directory, [])
|
||||
}
|
||||
groups.get(directory)!.push(tc)
|
||||
}
|
||||
|
||||
return Array.from(groups.entries()).map(([dir, tests]) => {
|
||||
const testResults = tests.map(tc => {
|
||||
const parsed = this.parseTestCaseName(tc.$.classname)
|
||||
const result = this.getTestCaseResult(tc)
|
||||
const time = parseFloat(tc.$.time || '0') * 1000
|
||||
const error = this.getTestCaseError(tc, parsed.filePath)
|
||||
return new TestCaseResult(parsed.displayName, result, time, error)
|
||||
})
|
||||
return new TestGroupResult(dir, testResults)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse test case name from classname attribute.
|
||||
*
|
||||
* Handles multiple patterns:
|
||||
* 1. Simple: "tests/Framework/Assert.equal.phpt"
|
||||
* 2. With method: "tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"
|
||||
* 3. With description: "Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"
|
||||
* 4. With class and method: "Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"
|
||||
*/
|
||||
private parseTestCaseName(classname: string): ParsedTestName {
|
||||
let filePath = classname
|
||||
let method: string | undefined
|
||||
let description: string | undefined
|
||||
let className: string | undefined
|
||||
|
||||
// Pattern: "Description | filepath [method=methodName]"
|
||||
// or "ClassName | filepath [method=methodName]"
|
||||
const pipePattern = /^(.+?)\s*\|\s*(.+?)(?:\s*\[method=(.+?)\])?$/
|
||||
const pipeMatch = classname.match(pipePattern)
|
||||
|
||||
if (pipeMatch) {
|
||||
const prefix = pipeMatch[1].trim()
|
||||
filePath = pipeMatch[2].trim()
|
||||
method = pipeMatch[3]
|
||||
|
||||
// Check if prefix looks like a class name (contains backslash AND ends with dot)
|
||||
// Examples: "Kdyby\BootstrapFormRenderer\BootstrapRenderer."
|
||||
// vs description: "Prevent loop in error handling. The #268 regression."
|
||||
if (prefix.includes('\\') && prefix.endsWith('.')) {
|
||||
className = prefix
|
||||
} else {
|
||||
description = prefix
|
||||
}
|
||||
} else {
|
||||
// Pattern: "filepath [method=methodName]"
|
||||
const methodPattern = /^(.+?)\s*\[method=(.+?)\]$/
|
||||
const methodMatch = classname.match(methodPattern)
|
||||
|
||||
if (methodMatch) {
|
||||
filePath = methodMatch[1].trim()
|
||||
method = methodMatch[2].trim()
|
||||
}
|
||||
}
|
||||
|
||||
// Generate display name
|
||||
const baseName = path.basename(filePath)
|
||||
let displayName = baseName
|
||||
|
||||
if (method) {
|
||||
displayName = `${baseName}::${method}`
|
||||
}
|
||||
|
||||
if (description) {
|
||||
displayName = `${description} (${baseName})`
|
||||
} else if (className && method) {
|
||||
// For class names, keep them but still show the file
|
||||
displayName = `${baseName}::${method}`
|
||||
}
|
||||
|
||||
return {filePath, method, description, className, displayName}
|
||||
}
|
||||
|
||||
private getTestCaseResult(test: TestCase): TestExecutionResult {
|
||||
if (test.failure || test.error) return 'failed'
|
||||
if (test.skipped) return 'skipped'
|
||||
return 'success'
|
||||
}
|
||||
|
||||
private getTestCaseError(tc: TestCase, filePath: string): TestCaseError | undefined {
|
||||
if (!this.options.parseErrors) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// We process <error> and <failure> the same way
|
||||
const failures = tc.failure ?? tc.error
|
||||
if (!failures || failures.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const failure = failures[0]
|
||||
// For Nette Tester, details are in the message attribute, not as inner text
|
||||
const details = typeof failure === 'string' ? failure : failure._ ?? failure.$?.message ?? ''
|
||||
|
||||
// Try to extract file path and line from error details
|
||||
let errorFilePath: string | undefined
|
||||
let line: number | undefined
|
||||
|
||||
if (details) {
|
||||
const extracted = this.extractFileAndLine(details)
|
||||
if (extracted) {
|
||||
errorFilePath = extracted.filePath
|
||||
line = extracted.line
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: use test file path if tracked
|
||||
if (!errorFilePath) {
|
||||
const normalized = normalizeFilePath(filePath)
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
errorFilePath = normalized
|
||||
}
|
||||
}
|
||||
|
||||
let message: string | undefined
|
||||
if (typeof failure !== 'string' && failure.$) {
|
||||
message = failure.$.message
|
||||
if (failure.$.type) {
|
||||
message = message ? `${failure.$.type}: ${message}` : failure.$.type
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
path: errorFilePath,
|
||||
line,
|
||||
details,
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract file path and line number from error details.
|
||||
* Matches patterns like: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
*/
|
||||
private extractFileAndLine(details: string): {filePath: string; line: number} | undefined {
|
||||
const lines = details.split(/\r?\n/)
|
||||
|
||||
for (const str of lines) {
|
||||
// Match PHP file patterns: /path/to/file.phpt:123 or /path/to/file.php:456
|
||||
const match = str.match(/((?:[A-Za-z]:)?[^\s:()]+?\.(?:php|phpt)):(\d+)/)
|
||||
if (match) {
|
||||
const normalized = normalizeFilePath(match[1])
|
||||
if (this.trackedFiles.has(normalized)) {
|
||||
return {filePath: normalized, line: parseInt(match[2])}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
46 src/parsers/tester-junit/tester-junit-types.ts (new file)
@@ -0,0 +1,46 @@
|
||||
export interface NetteTesterReport {
|
||||
testsuites: TestSuites
|
||||
}
|
||||
|
||||
export interface SingleSuiteReport {
|
||||
testsuite: TestSuite
|
||||
}
|
||||
|
||||
export interface TestSuites {
|
||||
$?: {
|
||||
time?: string
|
||||
}
|
||||
testsuite?: TestSuite[]
|
||||
}
|
||||
|
||||
export interface TestSuite {
|
||||
$: {
|
||||
// NOTE: name attribute is intentionally omitted - Nette Tester doesn't provide it
|
||||
tests: string
|
||||
errors: string
|
||||
failures?: string
|
||||
skipped: string
|
||||
time: string
|
||||
timestamp?: string
|
||||
}
|
||||
testcase?: TestCase[]
|
||||
}
|
||||
|
||||
export interface TestCase {
|
||||
$: {
|
||||
classname: string // File path, possibly with method or description prefix
|
||||
name: string // Usually same as classname
|
||||
time: string
|
||||
}
|
||||
failure?: Failure[]
|
||||
error?: Failure[]
|
||||
skipped?: string[]
|
||||
}
|
||||
|
||||
export interface Failure {
|
||||
_?: string
|
||||
$?: {
|
||||
type?: string
|
||||
message?: string
|
||||
}
|
||||
}
|
||||
@@ -16,6 +16,7 @@ export interface ReportOptions {
  useActionsSummary: boolean
  badgeTitle: string
  reportTitle: string
  collapsed: 'auto' | 'always' | 'never'
}

export const DEFAULT_OPTIONS: ReportOptions = {
@@ -25,16 +26,19 @@ export const DEFAULT_OPTIONS: ReportOptions = {
  onlySummary: false,
  useActionsSummary: true,
  badgeTitle: 'tests',
  reportTitle: ''
  reportTitle: '',
  collapsed: 'auto'
}

export function getReport(results: TestRunResult[], options: ReportOptions = DEFAULT_OPTIONS): string {
  core.info('Generating check run summary')

export function getReport(
  results: TestRunResult[],
  options: ReportOptions = DEFAULT_OPTIONS,
  shortSummary = ''
): string {
  applySort(results)

  const opts = {...options}
  let lines = renderReport(results, opts)
  let lines = renderReport(results, opts, shortSummary)
  let report = lines.join('\n')

  if (getByteLength(report) <= getMaxReportLength(options)) {
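For context, a rough usage sketch of the widened getReport signature; the summary string and option values are invented for the example:

// Hypothetical call site:
const report = getReport(results, {...DEFAULT_OPTIONS, reportTitle: 'Unit tests'}, '4 passed, 1 failed')
// Existing two-argument calls such as getReport(results, options) keep working,
// because shortSummary defaults to ''.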
@@ -44,7 +48,7 @@ export function getReport(results: TestRunResult[], options: ReportOptions = DEF
  if (opts.listTests === 'all') {
    core.info("Test report summary is too big - setting 'listTests' to 'failed'")
    opts.listTests = 'failed'
    lines = renderReport(results, opts)
    lines = renderReport(results, opts, shortSummary)
    report = lines.join('\n')
    if (getByteLength(report) <= getMaxReportLength(options)) {
      return report
@@ -101,7 +105,7 @@ function getByteLength(text: string): number {
  return Buffer.byteLength(text, 'utf8')
}

function renderReport(results: TestRunResult[], options: ReportOptions): string[] {
function renderReport(results: TestRunResult[], options: ReportOptions, shortSummary: string): string[] {
  const sections: string[] = []

  const reportTitle: string = options.reportTitle.trim()
@@ -109,6 +113,10 @@ function renderReport(results: TestRunResult[], options: ReportOptions): string[
    sections.push(`# ${reportTitle}`)
  }

  if (shortSummary) {
    sections.push(`## ${shortSummary}`)
  }

  const badge = getReportBadge(results, options)
  sections.push(badge)

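As a sketch of what the added block contributes to the rendered markdown (the values are invented; the badge markup itself comes from getReportBadge below):

// With reportTitle = 'Unit tests' and shortSummary = '4 passed, 1 failed',
// the report would start roughly with:
//   # Unit tests
//   ## 4 passed, 1 failed
//   <badge image>
// When shortSummary is '' (the default), no extra heading is emitted.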
@@ -125,7 +133,7 @@ function getReportBadge(results: TestRunResult[], options: ReportOptions): strin
  return getBadge(passed, failed, skipped, options)
}

function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
export function getBadge(passed: number, failed: number, skipped: number, options: ReportOptions): string {
  const text = []
  if (passed > 0) {
    text.push(`${passed} passed`)
@@ -145,14 +153,20 @@ function getBadge(passed: number, failed: number, skipped: number, options: Repo
    color = 'yellow'
  }
  const hint = failed > 0 ? 'Tests failed' : 'Tests passed successfully'
  const uri = encodeURIComponent(`${options.badgeTitle}-${message}-${color}`)
  return `![${hint}](https://img.shields.io/badge/${uri})`
  const encodedBadgeTitle = encodeImgShieldsURIComponent(options.badgeTitle)
  const encodedMessage = encodeImgShieldsURIComponent(message)
  const encodedColor = encodeImgShieldsURIComponent(color)
  return `![${hint}](https://img.shields.io/badge/${encodedBadgeTitle}-${encodedMessage}-${encodedColor})`
}

function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): string[] {
  const sections: string[] = []
  const totalFailed = testRuns.reduce((sum, tr) => sum + tr.failed, 0)
  if (totalFailed === 0) {

  // Determine if report should be collapsed based on collapsed option
  const shouldCollapse = options.collapsed === 'always' || (options.collapsed === 'auto' && totalFailed === 0)

  if (shouldCollapse) {
    sections.push(`<details><summary>Expand for details</summary>`)
    sections.push(` `)
  }
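As a quick reference, how the collapsed option maps onto the wrapping above (derived directly from the shouldCollapse condition):

// collapsed: 'always' -> report body always wrapped in <details> ... </details>
// collapsed: 'never'  -> report body never wrapped
// collapsed: 'auto'   -> wrapped only when no test failed (the previous default behaviour)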
@@ -185,7 +199,7 @@ function getTestRunsReport(testRuns: TestRunResult[], options: ReportOptions): s
    sections.push(...suitesReports)
  }

  if (totalFailed === 0) {
  if (shouldCollapse) {
    sections.push(`</details>`)
  }
  return sections
@@ -305,3 +319,7 @@ function getResultIcon(result: TestExecutionResult): string {
      return ''
  }
}

function encodeImgShieldsURIComponent(component: string): string {
  return encodeURIComponent(component).replace(/-/g, '--').replace(/_/g, '__')
}

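For illustration, a sketch of why the extra escaping matters for shields.io badge paths, where a single '-' separates the label, message and color fields (the badge titles below are invented):

// encodeImgShieldsURIComponent('unit-tests') -> 'unit--tests'
// encodeImgShieldsURIComponent('e2e_suite')  -> 'e2e__suite'
// Plain encodeURIComponent would leave the dash as-is and shields.io
// would split the badge label at the wrong place.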
@@ -36,7 +36,7 @@ export function ellipsis(text: string, maxLength: number): string {
    return text
  }

  return text.substr(0, maxLength - 3) + '...'
  return text.substring(0, maxLength - 3) + '...'
}

export function formatTime(ms: number): string {

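A brief note on the substr to substring swaps in this and the next hunk: String.prototype.substr is deprecated, and for the (0, n) call pattern used here the two are interchangeable:

// 'abcdef'.substr(0, 3)    === 'abc'  // (start, length) - deprecated
// 'abcdef'.substring(0, 3) === 'abc'  // (start, end) - identical result when start is 0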
@@ -34,6 +34,6 @@ export function getBasePath(path: string, trackedFiles: string[]): string | unde
    return undefined
  }

  const base = path.substr(0, path.length - max.length)
  const base = path.substring(0, path.length - max.length)
  return base
}
