Mirror of https://github.com/dorny/test-reporter.git (synced 2026-02-01 10:55:23 -08:00)

Compare commits: 205 commits
Commits in this comparison, newest first (SHA1 only):
a810f9bf83 b082adf0ec bcafc9fcbe b0cbac615f c92a2893a9 6697ec40e6 63870298f5 68967725f7 e17be7e007 6efb86e1f8
055bc8c025 17c900ba4e ff2d13cc36 20823bb69a 0be3971fec 4ee97617f7 a97700c53c 837045e72b d1de4d5f06 f24c625f56
6a8a429644 ee446707ff fe45e95373 e40a1da745 3445860437 9ef5c136b2 83e20c1534 4331a3b620 04232af26f cf146f4036
33fc27cf09 8fd5fc58ca fc80cb4400 79ea6a9d0e aef3d726a6 c1a56edcfe 3b9dad208e 7c636a991c cfce4bda71 fe87682515
9b8d3b002e e2f0ff6339 bc8c29617e 9aef9d168f 6b64465c34 6617053f9c 43a747d94c 7b7927aa7d eeac280b8e 6939db53fb
b3812e0f5b cd299561e7 c7935221e6 5fb0582760 7148297f02 828632acd0 4a41472ca4 22dc7b52f4 bed521d765 6079ce3d17
de77f76b7e c883ae9738 35be98f7e7 f372a8338e 948dd03d7b cf9db500ed ba33405987 34d8269ede fd1c798d8d 2211cf1035
be3721d54a d171d89cd4 661decd3af bd9e36bf0c 9642942c97 aa953f36f9 f686ce916a b14337a039 ec1e910416 353a438514
dc3a92680f e8e27361af ec9d9d2459 be36461fba 9c4a54379f 07e5c648b5 4d84da17a1 1c33c4c823 eea8b67eb1 8dd7047bf0
71814ae0cd 4128d36b92 d1504ea554 18430db883 ae8bd195f8 a1ac327414 09bbc2665b 5456de96b0 6adcc0c72a 2312e637f3
3a1ec876a9 c4b9a11207 981f52cdc2 016f16f7b8 6126f49c2c be2b975095 a6b3e93884 223c6cd55b b522d19cac d56352b96c
f078ba5e08 389794c9ad 9934a5fbd4 0f25185fa5 fb07f1b2a5 364887ed35 0b4ea9b681 302102c9a4 890a17cecf 53f5051dfe
d6ff56a60a b0baeedf4a ebe4a9b005 4a3cfcde80 84bcb5d437 a8c55a3654 a0398fb7dd 34f1c566ff 7745ff0ec1 d33ca7294f
29aefa7a46 f1fa471229 0f47a5bec1 2b2d091d3d 0840d7c281 0841c8130e d1bf678c89 5b44774702 ef7793576a 2acf6c2ccd
8b055ac247 bb9fb75efb 596aee5d4e d609194929 8039983cdb 314ef1dd49 6e6a65b7a0 3bd727259a 5c0d9a463a 613e721b02
f4ba16072c 27dd4e035f 10d304d4fb 70db77d88c 41662db5ca 472c8c84b3 f5b0d547ba 374896edff 1a288b62f8 95058abb17
7befe80c6c 45526f79fd 9557e57e83 9d0f09a6b6 84e60bad69 1db430559c e052c7d317 de62e458d1 bec9662ac9 4067b7aa0f
2d69204ad0 101b53723e 3c93b151a3 06ae02969c 21ae91ed3c 521e122f40 482d7087e0 1397b99b7c d8481703bc c40b69fc4a
ce340de8b9 953e623fd8 49c1f3ae6c b34d4b1bfe 574868ab61 78ed680850 7676f84e6e b6671b1f76 eb1e62c1b1 41db6fbaaa
81a2b8afcb 653ebca2c2 675ad23cef 775c900089 3816496a0a c1768c8b7a 0b7d35fd12 d5456180a6 fc13ca0827 724497a84c
3608ee03fd bd77050543 49667db475 83b7f42d2d ceb9822f8b
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 2 changes)
@@ -3,7 +3,7 @@ name: Bug report
 about: Create a report to help us improve
 title: ''
 labels: 'bug'
-assignees: 'dorny,dharmendrasha,j-catania'
+assignees: 'dorny,dharmendrasha'
 ---

 ## Describe the bug
.github/ISSUE_TEMPLATE/feature.md (vendored, 2 changes)
@@ -3,7 +3,7 @@ name: Feature Request
 about: Suggest a feature
 title: ''
 labels: 'enhancement'
-assignees: 'dorny,dharmendrasha,j-catania'
+assignees: 'dorny,dharmendrasha'
 ---

 ## Describe
.github/workflows/check-dist.yml (vendored, 6 changes)
@@ -21,10 +21,10 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6

 - name: Set Node.js
-uses: actions/setup-node@v4
+uses: actions/setup-node@v6
 with:
 node-version-file: '.nvmrc'

@@ -46,7 +46,7 @@ jobs:
 id: diff

 # If index.js was different than expected, upload the expected version as an artifact
-- uses: actions/upload-artifact@v3
+- uses: actions/upload-artifact@v6
 if: ${{ failure() && steps.diff.conclusion == 'failure' }}
 with:
 name: dist
.github/workflows/ci.yml (vendored, 8 changes)
@@ -13,8 +13,8 @@ jobs:
 name: Build & Test
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v4
-- uses: actions/setup-node@v4
+- uses: actions/checkout@v6
+- uses: actions/setup-node@v6
 with:
 node-version-file: '.nvmrc'
 - run: npm ci
@@ -24,8 +24,8 @@ jobs:
 - run: npm test

 - name: Upload test results
-if: success() || failure()
-uses: actions/upload-artifact@v3
+if: ${{ !cancelled() }}
+uses: actions/upload-artifact@v6
 with:
 name: test-results
 path: __tests__/__results__/*.xml
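
Note on the condition change above: `success() || failure()` and `!cancelled()` both keep the upload step running when `npm test` fails, and both skip it when the run is cancelled; `!cancelled()` is simply the more direct way to say "run unless the workflow was cancelled". A minimal sketch of the resulting step, reusing the values from ci.yml above:

```yaml
steps:
  - run: npm test
  - name: Upload test results
    if: ${{ !cancelled() }}   # run even if `npm test` failed, skip only on cancellation
    uses: actions/upload-artifact@v6
    with:
      name: test-results
      path: __tests__/__results__/*.xml
```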
.github/workflows/manual-run.yml (vendored, 4 changes)
@@ -8,14 +8,14 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6
 - run: npm ci
 - run: npm run build
 - run: npm test

 - name: Create test report
 uses: ./
-if: success() || failure()
+if: ${{ !cancelled() }}
 with:
 name: JEST Tests
 path: __tests__/__results__/*.xml
.github/workflows/test-report.yml (vendored, 2 changes)
@@ -11,7 +11,7 @@ jobs:
 name: Workflow test
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6
 - uses: ./
 with:
 artifact: test-results
.markdownlint.json (new file, 13 lines)
@@ -0,0 +1,13 @@
{
  "blanks-around-headings": false,
  "blanks-around-lists": false,
  "blanks-around-tables": false,
  "blanks-around-fences": false,
  "no-bare-urls": false,
  "line-length": false,
  "ul-style": false,
  "no-inline-html": false,
  "no-multiple-blanks": {
    "maximum": 3
  }
}
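
The new `.markdownlint.json` mostly turns off stylistic rules and caps consecutive blank lines at three. As a hypothetical illustration only (this repository's workflows do not define such a step, and markdownlint-cli with its `--config` flag is an assumption here), the config could be applied like this:

```yaml
- name: Lint Markdown
  run: npx markdownlint-cli --config .markdownlint.json "**/*.md"
```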
CHANGELOG.md (54 changes)
@@ -1,5 +1,59 @@
# Changelog

## 2.5.0
* Feature: Add Nette Tester support with `tester-junit` reporter https://github.com/dorny/test-reporter/pull/707
* Maintenance: Bump actions/upload-artifact from 5 to 6 https://github.com/dorny/test-reporter/pull/695

## 2.4.0
* Feature: Add PHPUnit support with JUnit XML dialect parser https://github.com/dorny/test-reporter/pull/422
* Feature: Add JUnit XML sample files and tests for validation https://github.com/dorny/test-reporter/pull/701
* Fix: Refactor deprecated `String.substr()` function to use `String.substring()` https://github.com/dorny/test-reporter/pull/704

## 2.3.0
* Feature: Add Python support with `python-xunit` reporter (pytest) https://github.com/dorny/test-reporter/pull/643
* Feature: Add pytest traceback parsing and `directory-mapping` option https://github.com/dorny/test-reporter/pull/238
* Performance: Update sax.js to fix large XML file parsing https://github.com/dorny/test-reporter/pull/681
* Documentation: Complete documentation for all supported reporters https://github.com/dorny/test-reporter/pull/691
* Security: Bump js-yaml and mocha in /reports/mocha (fixes prototype pollution) https://github.com/dorny/test-reporter/pull/682

## 2.2.0
* Feature: Add collapsed option to control report summary visibility https://github.com/dorny/test-reporter/pull/664
* Fix badge encoding for values including underscore and hyphens https://github.com/dorny/test-reporter/pull/672
* Fix missing `report-title` attribute in action definition https://github.com/dorny/test-reporter/pull/637
* Refactor variable names to fix shadowing issues https://github.com/dorny/test-reporter/pull/630

## 2.1.1
* Fix error when a TestMethod element does not have a className attribute in a trx file https://github.com/dorny/test-reporter/pull/623
* Add stack trace from trx to summary https://github.com/dorny/test-reporter/pull/615
* List only failed tests https://github.com/dorny/test-reporter/pull/606
* Add type definitions to `github-utils.ts` https://github.com/dorny/test-reporter/pull/604
* Avoid split on undefined https://github.com/dorny/test-reporter/pull/258
* Return links to summary report https://github.com/dorny/test-reporter/pull/588
* Add step summary short summary https://github.com/dorny/test-reporter/pull/589
* Fix for empty TRX TestDefinitions https://github.com/dorny/test-reporter/pull/582
* Increase step summary limit to 1MiB https://github.com/dorny/test-reporter/pull/581
* Fix input description for list options https://github.com/dorny/test-reporter/pull/572

## 2.1.0
* Feature: Add summary title https://github.com/dorny/test-reporter/pull/568
* Feature: Add Golang test parser https://github.com/dorny/test-reporter/pull/571
* Increase step summary limit to 1MiB https://github.com/dorny/test-reporter/pull/581
* Fix for empty TRX TestDefinitions https://github.com/dorny/test-reporter/pull/582
* Fix input description for list options https://github.com/dorny/test-reporter/pull/572
* Update npm packages https://github.com/dorny/test-reporter/pull/583

## 2.0.0
* Parse JUnit report with detailed message in failure https://github.com/dorny/test-reporter/pull/559
* Support displaying test results in markdown using GitHub Actions Job Summaries https://github.com/dorny/test-reporter/pull/383

## 1.9.1
* Fix problematic retransmission of authentication token https://github.com/dorny/test-reporter/pull/438
* Report correct number of tests in Dart https://github.com/dorny/test-reporter/pull/426
* Number of completed tests mismatches passed/failed https://github.com/dorny/test-reporter/issues/319

## 1.9.0
* Add support for Rspec (Ruby) https://github.com/dorny/test-reporter/pull/398

## 1.8.0
* Add `SwiftXunitParser` class based on `JavaJunitParser` for `swift-xunit` reporter https://github.com/dorny/test-reporter/pull/317
* Use NodeJS 18 LTS as default runtime https://github.com/dorny/test-reporter/pull/332
README.md (126 changes)
@@ -2,22 +2,26 @@

This [Github Action](https://github.com/features/actions) displays test results from popular testing frameworks directly in GitHub.

✔️ Parses test results in XML or JSON format and creates nice report as Github Check Run
✔️ Parses test results in XML or JSON format and creates nice report as GitHub Check Run or GitHub Actions job summaries

✔️ Annotates code where it failed based on message and stack trace captured during test execution

✔️ Provides final `conclusion` and counts of `passed`, `failed` and `skipped` tests as output parameters

**How it looks:**
(four screenshot thumbnails)

**Supported languages / frameworks:**
- .NET / [xUnit](https://xunit.net/) / [NUnit](https://nunit.org/) / [MSTest](https://github.com/Microsoft/testfx-docs)
- .NET / [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples) ( [xUnit](https://xunit.net/) / [NUnit](https://nunit.org/) / [MSTest](https://github.com/Microsoft/testfx-docs) )
- Dart / [test](https://pub.dev/packages/test)
- Flutter / [test](https://pub.dev/packages/test)
- Go / [go test](https://pkg.go.dev/testing)
- Java / [JUnit](https://junit.org/)
- JavaScript / [JEST](https://jestjs.io/) / [Mocha](https://mochajs.org/)
- Python / [pytest](https://docs.pytest.org/en/stable/) / [unittest](https://docs.python.org/3/library/unittest.html)
- PHP / [PHPUnit](https://phpunit.de/) / [Nette Tester](https://tester.nette.org/)
- Ruby / [RSpec](https://rspec.info/)
- Swift / xUnit

For more information see [Supported formats](#supported-formats) section.
@@ -43,13 +47,13 @@ jobs:
 name: Build & Test
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3 # checkout the repo
+- uses: actions/checkout@v4 # checkout the repo
 - run: npm ci # install packages
 - run: npm test # run tests (configured to use jest-junit reporter)

 - name: Test Report
-uses: dorny/test-reporter@v1
-if: success() || failure() # run this step even if previous step failed
+uses: dorny/test-reporter@v2
+if: ${{ !cancelled() }} # run this step even if previous step failed
 with:
 name: JEST Tests # Name of the check run which will be created
 path: reports/jest-*.xml # Path to test results
@@ -74,11 +78,11 @@ jobs:
 build-test:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3 # checkout the repo
+- uses: actions/checkout@v4 # checkout the repo
 - run: npm ci # install packages
 - run: npm test # run tests (configured to use jest-junit reporter)
-- uses: actions/upload-artifact@v3 # upload test results
-if: success() || failure() # run this step even if previous step failed
+- uses: actions/upload-artifact@v4 # upload test results
+if: ${{ !cancelled() }} # run this step even if previous step failed
 with:
 name: test-results
 path: jest-junit.xml
@@ -99,7 +103,7 @@ jobs:
 report:
 runs-on: ubuntu-latest
 steps:
-- uses: dorny/test-reporter@v1
+- uses: dorny/test-reporter@v2
 with:
 artifact: test-results # artifact name
 name: JEST Tests # Name of the check run which will be created
@@ -110,7 +114,7 @@ jobs:
 ## Usage

 ```yaml
-- uses: dorny/test-reporter@v1
+- uses: dorny/test-reporter@v2
 with:

 # Name or regex of artifact containing test results
@@ -137,12 +141,17 @@ jobs:

# Format of test results. Supported options:
# dart-json
# dotnet-nunit
# dotnet-trx
# flutter-json
# golang-json
# java-junit
# jest-junit
# mocha-json
# phpunit-junit
# python-xunit
# rspec-json
# swift-xunit
reporter: ''

# Allows you to generate only the summary.
@@ -150,9 +159,23 @@ jobs:
# Detailed listing of test suites and test cases will be skipped.
only-summary: 'false'

# Allows you to generate reports for Actions Summary
# https://github.blog/2022-05-09-supercharging-github-actions-with-job-summaries/
use-actions-summary: 'true'

# Optionally specify a title (Heading level 1) for the report. Leading and trailing whitespace are ignored.
# This is useful for separating your test report from other sections in the build summary.
# If omitted or set to whitespace/empty, no title will be printed.
report-title: ''

# Customize the title of badges shown for each Actions Summary.
# Useful when distinguish summaries for tests ran in multiple Actions steps.
badge-title: 'tests'

# Limits which test suites are listed:
# all
# failed
# none
list-suites: 'all'

# Limits which test cases are listed:
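
Putting the inputs documented above together, here is a minimal sketch of one possible configuration (the values are illustrative; `list-tests` accepts the same `all`/`failed`/`none` options as `list-suites`):

```yaml
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: Unit Tests
    path: reports/jest-*.xml
    reporter: jest-junit
    use-actions-summary: 'true'
    report-title: 'Unit test results'
    badge-title: 'unit-tests'
    list-suites: 'failed'
    list-tests: 'failed'
```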
@@ -236,6 +259,20 @@ Supported testing frameworks:
For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>

<details>
<summary>dotnet-nunit</summary>

Test execution must be configured to generate [NUnit3](https://docs.nunit.org/articles/nunit/technical-notes/usage/Test-Result-XML-Format.html) XML test results.
Install the [NUnit3TestAdapter](https://www.nuget.org/packages/NUnit3TestAdapter) package (required; it registers the `nunit` logger for `dotnet test`), then run tests with:

`dotnet test --logger "nunit;LogFileName=test-results.xml"`

Supported testing frameworks:
- [NUnit](https://nunit.org/)

For more information see [dotnet test](https://docs.microsoft.com/en-us/dotnet/core/tools/dotnet-test#examples)
</details>
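
A hedged end-to-end sketch for this format (the job layout and result path are illustrative assumptions; the NUnit logger writes into each project's `TestResults` folder by default):

```yaml
- run: dotnet test --logger "nunit;LogFileName=test-results.xml"
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: NUnit Tests
    path: '**/TestResults/test-results.xml'
    reporter: dotnet-nunit
```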

<details>
<summary>flutter-json</summary>
@@ -263,6 +300,13 @@ For more information see:

</details>

<details>
<summary>golang-json</summary>

You must use the `-json` flag and output the results to a file (ex: `go test -json > testresults.json`)

</details>

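A minimal sketch of wiring this into a workflow (the `./...` package pattern and file name are illustrative):

```yaml
- run: go test -json ./... > testresults.json
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: Go Tests
    path: testresults.json
    reporter: golang-json
```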
<details>
<summary>java-junit (Experimental)</summary>

@@ -272,6 +316,27 @@ This is due to the fact Java stack traces don't contain a full path to the source
Some heuristic was necessary to figure out the mapping between the line in the stack trace and an actual source file.
</details>

<details>
<summary>phpunit-junit</summary>

[PHPUnit](https://phpunit.de/) can generate JUnit XML via CLI:
`phpunit --log-junit reports/phpunit-junit.xml`

</details>

<details>
<summary>tester-junit</summary>

[Nette Tester](https://tester.nette.org/) can generate JUnit XML via CLI:

```bash
tester -s -o junit tests/ > reports/tester-junit.xml
```

**Note:** Nette Tester's JUnit output doesn't include test suite names. The parser will use the report file name as the suite name and automatically group tests by directory structure.

</details>

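For either PHP format the generated XML is then handed to the action; a short sketch (paths reuse the commands above, everything else is illustrative):

```yaml
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: PHPUnit Tests
    path: reports/phpunit-junit.xml   # or reports/tester-junit.xml
    reporter: phpunit-junit           # or tester-junit for Nette Tester output
```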
<details>
<summary>jest-junit</summary>

@@ -325,6 +390,41 @@ Before version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0), M
Please update Mocha to version [v9.1.0](https://github.com/mochajs/mocha/releases/tag/v9.1.0) or above if you encounter this issue.
</details>

<details>
<summary>python-xunit (Experimental)</summary>

Support for Python test results in xUnit format is experimental - should work but it was not extensively tested.

For **pytest** support, configure [JUnit XML output](https://docs.pytest.org/en/stable/how-to/output.html#creating-junitxml-format-files) and run with the `--junit-xml` option, which also lets you specify the output path for test results.

```shell
pytest --junit-xml=test-report.xml
```

For **unittest** support, use a test runner that outputs the JUnit report format, such as [unittest-xml-reporting](https://pypi.org/project/unittest-xml-reporting/).
</details>

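A matching workflow sketch for pytest output (the file name is taken from the command above, the rest is illustrative):

```yaml
- run: pytest --junit-xml=test-report.xml
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: Pytest
    path: test-report.xml
    reporter: python-xunit
```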
<details>
<summary>rspec-json</summary>

[RSpec](https://rspec.info/) testing framework support requires the usage of JSON formatter.
You can configure RSpec to output JSON format by using the `--format json` option and redirecting to a file:

```shell
rspec --format json --out rspec-results.json
```

Or configure it in `.rspec` file:
```
--format json
--out rspec-results.json
```

For more information see:
- [RSpec documentation](https://rspec.info/)
- [RSpec Formatters](https://relishapp.com/rspec/rspec-core/docs/formatters)
</details>

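And the corresponding reporter step (illustrative values, reusing the output file from the commands above):

```yaml
- run: rspec --format json --out rspec-results.json
- uses: dorny/test-reporter@v2
  if: ${{ !cancelled() }}
  with:
    name: RSpec Tests
    path: rspec-results.json
    reporter: rspec-json
```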
<details>
<summary>swift-xunit (Experimental)</summary>

@@ -335,9 +435,9 @@ Support for Swift test results in xUnit format is experimental - should work but

Unfortunately, there are some known issues and limitations caused by GitHub API:

- Test report (i.e. Check Run summary) is markdown text. No custom styling or HTML is possible.
- Test report (i.e. build summary) is Markdown text. No custom styling or HTML is possible.
- Maximum report size is 65535 bytes. Input parameters `list-suites` and `list-tests` will be automatically adjusted if max size is exceeded.
- Test report can't reference any additional files (e.g. screenshots). You can use `actions/upload-artifact@v3` to upload them and inspect them manually.
- Test report can't reference any additional files (e.g. screenshots). You can use `actions/upload-artifact@v4` to upload them and inspect them manually.
- Check Runs are created for specific commit SHA. It's not possible to specify under which workflow test report should belong if more
workflows are running for the same SHA. Thanks to this GitHub "feature" it's possible your test report will appear in an unexpected place in GitHub UI.
For more information, see [#67](https://github.com/dorny/test-reporter/issues/67).
@@ -1,11 +1,14 @@

## ❌ <a id="user-content-r0" href="#r0">fixtures/dart-json.json</a>
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dart-json.json](#user-content-r0)|1 ✅|4 ❌|1 ⚪|4s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dart-json.json</a>
**6** tests were completed in **4s** with **1** passed, **4** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[test/main_test.dart](#r0s0)|1✅|3❌||74ms|
|[test/second_test.dart](#r0s1)||1❌|1⚪|51ms|
### ❌ <a id="user-content-r0s0" href="#r0s0">test/main_test.dart</a>
|[test/main_test.dart](#user-content-r0s0)|1 ✅|3 ❌||74ms|
|[test/second_test.dart](#user-content-r0s1)||1 ❌|1 ⚪|51ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">test/main_test.dart</a>
```
Test 1
✅ Passing test
@@ -20,7 +23,7 @@ Test 2
❌ Exception in test
Exception: Some error
```
### ❌ <a id="user-content-r0s1" href="#r0s1">test/second_test.dart</a>
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">test/second_test.dart</a>
```
❌ Timeout test
TimeoutException after 0:00:00.000001: Test timed out after 0 seconds.
__tests__/__outputs__/dotnet-nunit.md (new file, 31 lines)
@@ -0,0 +1,31 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-nunit.xml](#user-content-r0)|3 ✅|5 ❌|1 ⚪|230ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-nunit.xml</a>
**9** tests were completed in **230ms** with **3** passed, **5** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.NUnitV3Tests.dll.DotnetTests.XUnitTests](#user-content-r0s0)|3 ✅|5 ❌|1 ⚪|69ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.NUnitV3Tests.dll.DotnetTests.XUnitTests</a>
```
CalculatorTests
✅ Is_Even_Number(2)
❌ Is_Even_Number(3)
Expected: True
But was: False

❌ Exception_In_TargetTest
System.DivideByZeroException : Attempted to divide by zero.
❌ Exception_In_Test
System.Exception : Test
❌ Failing_Test
Expected: 3
But was: 2

✅ Passing_Test
✅ Passing_Test_With_Description
⚪ Skipped_Test
❌ Timeout_Test

```
__tests__/__outputs__/dotnet-trx-only-failed.md (new file, 34 lines)
@@ -0,0 +1,34 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-trx.trx](#user-content-r0)|5 ✅|5 ❌|1 ⚪|1s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-trx.trx</a>
**11** tests were completed in **1s** with **5** passed, **5** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitTests.CalculatorTests](#user-content-r0s0)|5 ✅|5 ❌|1 ⚪|118ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitTests.CalculatorTests</a>
```
❌ Exception_In_TargetTest
System.DivideByZeroException : Attempted to divide by zero.
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33
❌ Exception_In_Test
System.Exception : Test
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39
❌ Failing_Test
Assert.Equal() Failure
Expected: 3
Actual: 2
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27
❌ Is_Even_Number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59
❌ Should be even number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67
```
@@ -1,31 +1,40 @@

## ❌ <a id="user-content-r0" href="#r0">fixtures/dotnet-trx.trx</a>
|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-trx.trx](#user-content-r0)|5 ✅|5 ❌|1 ⚪|1s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-trx.trx</a>
**11** tests were completed in **1s** with **5** passed, **5** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitTests.CalculatorTests](#r0s0)|5✅|5❌|1⚪|118ms|
### ❌ <a id="user-content-r0s0" href="#r0s0">DotnetTests.XUnitTests.CalculatorTests</a>
|[DotnetTests.XUnitTests.CalculatorTests](#user-content-r0s0)|5 ✅|5 ❌|1 ⚪|118ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitTests.CalculatorTests</a>
```
✅ Custom Name
❌ Exception_In_TargetTest
System.DivideByZeroException : Attempted to divide by zero.
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33
❌ Exception_In_Test
System.Exception : Test
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39
❌ Failing_Test
Assert.Equal() Failure
Expected: 3
Actual: 2
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27
✅ Is_Even_Number(i: 2)
❌ Is_Even_Number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59
✅ Passing_Test
✅ Should be even number(i: 2)
❌ Should be even number(i: 3)
Assert.True() Failure
Expected: True
Actual: False
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67
⚪ Skipped_Test
✅ Timeout_Test
```
__tests__/__outputs__/dotnet-xunitv3.md (new file, 26 lines)
@@ -0,0 +1,26 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/dotnet-xunitv3.trx](#user-content-r0)|1 ✅|3 ❌||267ms|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/dotnet-xunitv3.trx</a>
**4** tests were completed in **267ms** with **1** passed, **3** failed and **0** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[DotnetTests.XUnitV3Tests.FixtureTests](#user-content-r0s0)|1 ✅|1 ❌||18ms|
|[Unclassified](#user-content-r0s1)||2 ❌||0ms|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">DotnetTests.XUnitV3Tests.FixtureTests</a>
```
❌ Failing_Test
Assert.Null() Failure: Value is not null
Expected: null
Actual: Fixture { }
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)
✅ Passing_Test
```
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">Unclassified</a>
```
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]
❌ [Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]
```
@@ -1,71 +1,76 @@

## ✅ <a id="user-content-r0" href="#r0">fixtures/external/FluentValidation.Tests.trx</a>
<details><summary>Expand for details</summary>

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/external/FluentValidation.Tests.trx](#user-content-r0)|803 ✅||1 ⚪|4s|
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/FluentValidation.Tests.trx</a>
**804** tests were completed in **4s** with **803** passed, **0** failed and **1** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[FluentValidation.Tests.AbstractValidatorTester](#r0s0)|35✅|||12ms|
|
||||
|[FluentValidation.Tests.AccessorCacheTests](#r0s1)|4✅||1⚪|4ms|
|
||||
|[FluentValidation.Tests.AssemblyScannerTester](#r0s2)|2✅|||2ms|
|
||||
|[FluentValidation.Tests.CascadingFailuresTester](#r0s3)|38✅|||23ms|
|
||||
|[FluentValidation.Tests.ChainedValidationTester](#r0s4)|13✅|||6ms|
|
||||
|[FluentValidation.Tests.ChainingValidatorsTester](#r0s5)|3✅|||1ms|
|
||||
|[FluentValidation.Tests.ChildRulesTests](#r0s6)|2✅|||7ms|
|
||||
|[FluentValidation.Tests.CollectionValidatorWithParentTests](#r0s7)|16✅|||13ms|
|
||||
|[FluentValidation.Tests.ComplexValidationTester](#r0s8)|17✅|||26ms|
|
||||
|[FluentValidation.Tests.ConditionTests](#r0s9)|18✅|||9ms|
|
||||
|[FluentValidation.Tests.CreditCardValidatorTests](#r0s10)|2✅|||2ms|
|
||||
|[FluentValidation.Tests.CustomFailureActionTester](#r0s11)|3✅|||1ms|
|
||||
|[FluentValidation.Tests.CustomMessageFormatTester](#r0s12)|6✅|||3ms|
|
||||
|[FluentValidation.Tests.CustomValidatorTester](#r0s13)|10✅|||6ms|
|
||||
|[FluentValidation.Tests.DefaultValidatorExtensionTester](#r0s14)|30✅|||38ms|
|
||||
|[FluentValidation.Tests.EmailValidatorTests](#r0s15)|36✅|||18ms|
|
||||
|[FluentValidation.Tests.EmptyTester](#r0s16)|9✅|||5ms|
|
||||
|[FluentValidation.Tests.EnumValidatorTests](#r0s17)|12✅|||24ms|
|
||||
|[FluentValidation.Tests.EqualValidatorTests](#r0s18)|10✅|||3ms|
|
||||
|[FluentValidation.Tests.ExactLengthValidatorTester](#r0s19)|6✅|||2ms|
|
||||
|[FluentValidation.Tests.ExclusiveBetweenValidatorTests](#r0s20)|19✅|||6ms|
|
||||
|[FluentValidation.Tests.ExtensionTester](#r0s21)|4✅|||1ms|
|
||||
|[FluentValidation.Tests.ForEachRuleTests](#r0s22)|34✅|||47ms|
|
||||
|[FluentValidation.Tests.GreaterThanOrEqualToValidatorTester](#r0s23)|14✅|||5ms|
|
||||
|[FluentValidation.Tests.GreaterThanValidatorTester](#r0s24)|13✅|||4ms|
|
||||
|[FluentValidation.Tests.InclusiveBetweenValidatorTests](#r0s25)|18✅|||4ms|
|
||||
|[FluentValidation.Tests.InheritanceValidatorTest](#r0s26)|11✅|||18ms|
|
||||
|[FluentValidation.Tests.InlineValidatorTester](#r0s27)|1✅|||2ms|
|
||||
|[FluentValidation.Tests.LanguageManagerTests](#r0s28)|21✅|||28ms|
|
||||
|[FluentValidation.Tests.LengthValidatorTests](#r0s29)|16✅|||17ms|
|
||||
|[FluentValidation.Tests.LessThanOrEqualToValidatorTester](#r0s30)|13✅|||4ms|
|
||||
|[FluentValidation.Tests.LessThanValidatorTester](#r0s31)|16✅|||6ms|
|
||||
|[FluentValidation.Tests.LocalisedMessagesTester](#r0s32)|6✅|||3ms|
|
||||
|[FluentValidation.Tests.LocalisedNameTester](#r0s33)|2✅|||1ms|
|
||||
|[FluentValidation.Tests.MemberAccessorTests](#r0s34)|9✅|||5ms|
|
||||
|[FluentValidation.Tests.MessageFormatterTests](#r0s35)|10✅|||2ms|
|
||||
|[FluentValidation.Tests.ModelLevelValidatorTests](#r0s36)|2✅|||1ms|
|
||||
|[FluentValidation.Tests.NameResolutionPluggabilityTester](#r0s37)|3✅|||2ms|
|
||||
|[FluentValidation.Tests.NotEmptyTester](#r0s38)|10✅|||7ms|
|
||||
|[FluentValidation.Tests.NotEqualValidatorTests](#r0s39)|11✅|||7ms|
|
||||
|[FluentValidation.Tests.NotNullTester](#r0s40)|5✅|||1ms|
|
||||
|[FluentValidation.Tests.NullTester](#r0s41)|5✅|||2ms|
|
||||
|[FluentValidation.Tests.OnFailureTests](#r0s42)|10✅|||8ms|
|
||||
|[FluentValidation.Tests.PredicateValidatorTester](#r0s43)|5✅|||2ms|
|
||||
|[FluentValidation.Tests.PropertyChainTests](#r0s44)|7✅|||1ms|
|
||||
|[FluentValidation.Tests.RegularExpressionValidatorTests](#r0s45)|15✅|||6ms|
|
||||
|[FluentValidation.Tests.RuleBuilderTests](#r0s46)|29✅|||96ms|
|
||||
|[FluentValidation.Tests.RuleDependencyTests](#r0s47)|14✅|||3s|
|
||||
|[FluentValidation.Tests.RulesetTests](#r0s48)|21✅|||14ms|
|
||||
|[FluentValidation.Tests.ScalePrecisionValidatorTests](#r0s49)|6✅|||4ms|
|
||||
|[FluentValidation.Tests.SharedConditionTests](#r0s50)|42✅|||42ms|
|
||||
|[FluentValidation.Tests.StandalonePropertyValidationTester](#r0s51)|1✅|||0ms|
|
||||
|[FluentValidation.Tests.StringEnumValidatorTests](#r0s52)|10✅|||5ms|
|
||||
|[FluentValidation.Tests.TrackingCollectionTests](#r0s53)|3✅|||2ms|
|
||||
|[FluentValidation.Tests.TransformTests](#r0s54)|4✅|||3ms|
|
||||
|[FluentValidation.Tests.UserSeverityTester](#r0s55)|7✅|||3ms|
|
||||
|[FluentValidation.Tests.UserStateTester](#r0s56)|4✅|||3ms|
|
||||
|[FluentValidation.Tests.ValidateAndThrowTester](#r0s57)|14✅|||25ms|
|
||||
|[FluentValidation.Tests.ValidationResultTests](#r0s58)|8✅|||8ms|
|
||||
|[FluentValidation.Tests.ValidatorDescriptorTester](#r0s59)|5✅|||1ms|
|
||||
|[FluentValidation.Tests.ValidatorSelectorTests](#r0s60)|10✅|||9ms|
|
||||
|[FluentValidation.Tests.ValidatorTesterTester](#r0s61)|73✅|||74ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#r0s0">FluentValidation.Tests.AbstractValidatorTester</a>
|
||||
|[FluentValidation.Tests.AbstractValidatorTester](#user-content-r0s0)|35 ✅|||12ms|
|
||||
|[FluentValidation.Tests.AccessorCacheTests](#user-content-r0s1)|4 ✅||1 ⚪|4ms|
|
||||
|[FluentValidation.Tests.AssemblyScannerTester](#user-content-r0s2)|2 ✅|||2ms|
|
||||
|[FluentValidation.Tests.CascadingFailuresTester](#user-content-r0s3)|38 ✅|||23ms|
|
||||
|[FluentValidation.Tests.ChainedValidationTester](#user-content-r0s4)|13 ✅|||6ms|
|
||||
|[FluentValidation.Tests.ChainingValidatorsTester](#user-content-r0s5)|3 ✅|||1ms|
|
||||
|[FluentValidation.Tests.ChildRulesTests](#user-content-r0s6)|2 ✅|||7ms|
|
||||
|[FluentValidation.Tests.CollectionValidatorWithParentTests](#user-content-r0s7)|16 ✅|||13ms|
|
||||
|[FluentValidation.Tests.ComplexValidationTester](#user-content-r0s8)|17 ✅|||26ms|
|
||||
|[FluentValidation.Tests.ConditionTests](#user-content-r0s9)|18 ✅|||9ms|
|
||||
|[FluentValidation.Tests.CreditCardValidatorTests](#user-content-r0s10)|2 ✅|||2ms|
|
||||
|[FluentValidation.Tests.CustomFailureActionTester](#user-content-r0s11)|3 ✅|||1ms|
|
||||
|[FluentValidation.Tests.CustomMessageFormatTester](#user-content-r0s12)|6 ✅|||3ms|
|
||||
|[FluentValidation.Tests.CustomValidatorTester](#user-content-r0s13)|10 ✅|||6ms|
|
||||
|[FluentValidation.Tests.DefaultValidatorExtensionTester](#user-content-r0s14)|30 ✅|||38ms|
|
||||
|[FluentValidation.Tests.EmailValidatorTests](#user-content-r0s15)|36 ✅|||18ms|
|
||||
|[FluentValidation.Tests.EmptyTester](#user-content-r0s16)|9 ✅|||5ms|
|
||||
|[FluentValidation.Tests.EnumValidatorTests](#user-content-r0s17)|12 ✅|||24ms|
|
||||
|[FluentValidation.Tests.EqualValidatorTests](#user-content-r0s18)|10 ✅|||3ms|
|
||||
|[FluentValidation.Tests.ExactLengthValidatorTester](#user-content-r0s19)|6 ✅|||2ms|
|
||||
|[FluentValidation.Tests.ExclusiveBetweenValidatorTests](#user-content-r0s20)|19 ✅|||6ms|
|
||||
|[FluentValidation.Tests.ExtensionTester](#user-content-r0s21)|4 ✅|||1ms|
|
||||
|[FluentValidation.Tests.ForEachRuleTests](#user-content-r0s22)|34 ✅|||47ms|
|
||||
|[FluentValidation.Tests.GreaterThanOrEqualToValidatorTester](#user-content-r0s23)|14 ✅|||5ms|
|
||||
|[FluentValidation.Tests.GreaterThanValidatorTester](#user-content-r0s24)|13 ✅|||4ms|
|
||||
|[FluentValidation.Tests.InclusiveBetweenValidatorTests](#user-content-r0s25)|18 ✅|||4ms|
|
||||
|[FluentValidation.Tests.InheritanceValidatorTest](#user-content-r0s26)|11 ✅|||18ms|
|
||||
|[FluentValidation.Tests.InlineValidatorTester](#user-content-r0s27)|1 ✅|||2ms|
|
||||
|[FluentValidation.Tests.LanguageManagerTests](#user-content-r0s28)|21 ✅|||28ms|
|
||||
|[FluentValidation.Tests.LengthValidatorTests](#user-content-r0s29)|16 ✅|||17ms|
|
||||
|[FluentValidation.Tests.LessThanOrEqualToValidatorTester](#user-content-r0s30)|13 ✅|||4ms|
|
||||
|[FluentValidation.Tests.LessThanValidatorTester](#user-content-r0s31)|16 ✅|||6ms|
|
||||
|[FluentValidation.Tests.LocalisedMessagesTester](#user-content-r0s32)|6 ✅|||3ms|
|
||||
|[FluentValidation.Tests.LocalisedNameTester](#user-content-r0s33)|2 ✅|||1ms|
|
||||
|[FluentValidation.Tests.MemberAccessorTests](#user-content-r0s34)|9 ✅|||5ms|
|
||||
|[FluentValidation.Tests.MessageFormatterTests](#user-content-r0s35)|10 ✅|||2ms|
|
||||
|[FluentValidation.Tests.ModelLevelValidatorTests](#user-content-r0s36)|2 ✅|||1ms|
|
||||
|[FluentValidation.Tests.NameResolutionPluggabilityTester](#user-content-r0s37)|3 ✅|||2ms|
|
||||
|[FluentValidation.Tests.NotEmptyTester](#user-content-r0s38)|10 ✅|||7ms|
|
||||
|[FluentValidation.Tests.NotEqualValidatorTests](#user-content-r0s39)|11 ✅|||7ms|
|
||||
|[FluentValidation.Tests.NotNullTester](#user-content-r0s40)|5 ✅|||1ms|
|
||||
|[FluentValidation.Tests.NullTester](#user-content-r0s41)|5 ✅|||2ms|
|
||||
|[FluentValidation.Tests.OnFailureTests](#user-content-r0s42)|10 ✅|||8ms|
|
||||
|[FluentValidation.Tests.PredicateValidatorTester](#user-content-r0s43)|5 ✅|||2ms|
|
||||
|[FluentValidation.Tests.PropertyChainTests](#user-content-r0s44)|7 ✅|||1ms|
|
||||
|[FluentValidation.Tests.RegularExpressionValidatorTests](#user-content-r0s45)|15 ✅|||6ms|
|
||||
|[FluentValidation.Tests.RuleBuilderTests](#user-content-r0s46)|29 ✅|||96ms|
|
||||
|[FluentValidation.Tests.RuleDependencyTests](#user-content-r0s47)|14 ✅|||3s|
|
||||
|[FluentValidation.Tests.RulesetTests](#user-content-r0s48)|21 ✅|||14ms|
|
||||
|[FluentValidation.Tests.ScalePrecisionValidatorTests](#user-content-r0s49)|6 ✅|||4ms|
|
||||
|[FluentValidation.Tests.SharedConditionTests](#user-content-r0s50)|42 ✅|||42ms|
|
||||
|[FluentValidation.Tests.StandalonePropertyValidationTester](#user-content-r0s51)|1 ✅|||0ms|
|
||||
|[FluentValidation.Tests.StringEnumValidatorTests](#user-content-r0s52)|10 ✅|||5ms|
|
||||
|[FluentValidation.Tests.TrackingCollectionTests](#user-content-r0s53)|3 ✅|||2ms|
|
||||
|[FluentValidation.Tests.TransformTests](#user-content-r0s54)|4 ✅|||3ms|
|
||||
|[FluentValidation.Tests.UserSeverityTester](#user-content-r0s55)|7 ✅|||3ms|
|
||||
|[FluentValidation.Tests.UserStateTester](#user-content-r0s56)|4 ✅|||3ms|
|
||||
|[FluentValidation.Tests.ValidateAndThrowTester](#user-content-r0s57)|14 ✅|||25ms|
|
||||
|[FluentValidation.Tests.ValidationResultTests](#user-content-r0s58)|8 ✅|||8ms|
|
||||
|[FluentValidation.Tests.ValidatorDescriptorTester](#user-content-r0s59)|5 ✅|||1ms|
|
||||
|[FluentValidation.Tests.ValidatorSelectorTests](#user-content-r0s60)|10 ✅|||9ms|
|
||||
|[FluentValidation.Tests.ValidatorTesterTester](#user-content-r0s61)|73 ✅|||74ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">FluentValidation.Tests.AbstractValidatorTester</a>
|
||||
```
|
||||
✅ Can_replace_default_errorcode_resolver
|
||||
✅ CanValidateInstancesOfType_returns_false_when_comparing_against_some_other_type
|
||||
@@ -103,7 +108,7 @@
|
||||
✅ WithName_should_override_field_name
|
||||
✅ WithName_should_override_field_name_with_value_from_other_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#r0s1">FluentValidation.Tests.AccessorCacheTests</a>
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">FluentValidation.Tests.AccessorCacheTests</a>
|
||||
```
|
||||
⚪ Benchmark
|
||||
✅ Equality_comparison_check
|
||||
@@ -111,12 +116,12 @@
|
||||
✅ Gets_member_for_nested_property
|
||||
✅ Identifies_if_memberexp_acts_on_model_instance
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#r0s2">FluentValidation.Tests.AssemblyScannerTester</a>
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">FluentValidation.Tests.AssemblyScannerTester</a>
|
||||
```
|
||||
✅ Finds_validators_for_types
|
||||
✅ ForEach_iterates_over_types
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#r0s3">FluentValidation.Tests.CascadingFailuresTester</a>
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">FluentValidation.Tests.CascadingFailuresTester</a>
|
||||
```
|
||||
✅ Cascade_mode_can_be_set_after_validator_instantiated
|
||||
✅ Cascade_mode_can_be_set_after_validator_instantiated_async
|
||||
@@ -157,7 +162,7 @@
|
||||
✅ Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_validator_level_async_legacy
|
||||
✅ Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_validator_level_legacy
|
||||
```
|
||||
### ✅ <a id="user-content-r0s4" href="#r0s4">FluentValidation.Tests.ChainedValidationTester</a>
|
||||
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">FluentValidation.Tests.ChainedValidationTester</a>
|
||||
```
|
||||
✅ Can_validate_using_validator_for_base_type
|
||||
✅ Chained_property_should_be_excluded
|
||||
@@ -173,18 +178,18 @@
|
||||
✅ Uses_explicit_ruleset
|
||||
✅ Validates_chained_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s5" href="#r0s5">FluentValidation.Tests.ChainingValidatorsTester</a>
|
||||
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">FluentValidation.Tests.ChainingValidatorsTester</a>
|
||||
```
|
||||
✅ Options_should_only_apply_to_current_validator
|
||||
✅ Should_create_multiple_validators
|
||||
✅ Should_execute_multiple_validators
|
||||
```
|
||||
### ✅ <a id="user-content-r0s6" href="#r0s6">FluentValidation.Tests.ChildRulesTests</a>
|
||||
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">FluentValidation.Tests.ChildRulesTests</a>
|
||||
```
|
||||
✅ Can_define_nested_rules_for_collection
|
||||
✅ ChildRules_works_with_RuleSet
|
||||
```
|
||||
### ✅ <a id="user-content-r0s7" href="#r0s7">FluentValidation.Tests.CollectionValidatorWithParentTests</a>
|
||||
### ✅ <a id="user-content-r0s7" href="#user-content-r0s7">FluentValidation.Tests.CollectionValidatorWithParentTests</a>
|
||||
```
|
||||
✅ Async_condition_should_work_with_child_collection
|
||||
✅ Can_specify_condition_for_individual_collection_elements
|
||||
@@ -203,7 +208,7 @@
|
||||
✅ Validates_collection_several_levels_deep
|
||||
✅ Validates_collection_several_levels_deep_async
|
||||
```
|
||||
### ✅ <a id="user-content-r0s8" href="#r0s8">FluentValidation.Tests.ComplexValidationTester</a>
|
||||
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">FluentValidation.Tests.ComplexValidationTester</a>
|
||||
```
|
||||
✅ Async_condition_should_work_with_complex_property
|
||||
✅ Async_condition_should_work_with_complex_property_when_validator_invoked_synchronously
|
||||
@@ -223,7 +228,7 @@
|
||||
✅ Validates_child_validator_synchronously
|
||||
✅ Validates_complex_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s9" href="#r0s9">FluentValidation.Tests.ConditionTests</a>
|
||||
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">FluentValidation.Tests.ConditionTests</a>
|
||||
```
|
||||
✅ Async_condition_executed_synchronosuly_with_asynchronous_collection_rule
|
||||
✅ Async_condition_executed_synchronosuly_with_asynchronous_rule
|
||||
@@ -244,18 +249,18 @@
|
||||
✅ Validation_should_succeed_when_condition_does_not_match
|
||||
✅ Validation_should_succeed_when_condition_matches
|
||||
```
|
||||
### ✅ <a id="user-content-r0s10" href="#r0s10">FluentValidation.Tests.CreditCardValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">FluentValidation.Tests.CreditCardValidatorTests</a>
|
||||
```
|
||||
✅ IsValidTests
|
||||
✅ When_validation_fails_the_default_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s11" href="#r0s11">FluentValidation.Tests.CustomFailureActionTester</a>
|
||||
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">FluentValidation.Tests.CustomFailureActionTester</a>
|
||||
```
|
||||
✅ Does_not_invoke_action_if_validation_success
|
||||
✅ Invokes_custom_action_on_failure
|
||||
✅ Passes_object_being_validated_to_action
|
||||
```
|
||||
### ✅ <a id="user-content-r0s12" href="#r0s12">FluentValidation.Tests.CustomMessageFormatTester</a>
|
||||
### ✅ <a id="user-content-r0s12" href="#user-content-r0s12">FluentValidation.Tests.CustomMessageFormatTester</a>
|
||||
```
|
||||
✅ Replaces_propertyvalue_placeholder
|
||||
✅ Replaces_propertyvalue_with_empty_string_when_null
|
||||
@@ -264,7 +269,7 @@
|
||||
✅ Uses_custom_delegate_for_building_message_only_for_specific_validator
|
||||
✅ Uses_property_value_in_message
|
||||
```
|
||||
### ✅ <a id="user-content-r0s13" href="#r0s13">FluentValidation.Tests.CustomValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s13" href="#user-content-r0s13">FluentValidation.Tests.CustomValidatorTester</a>
|
||||
```
|
||||
✅ New_Custom_Returns_single_failure
|
||||
✅ New_Custom_Returns_single_failure_async
|
||||
@@ -277,7 +282,7 @@
|
||||
✅ Runs_async_rule_synchronously_when_validator_invoked_synchronously
|
||||
✅ Runs_sync_rule_asynchronously_when_validator_invoked_asynchronously
|
||||
```
|
||||
### ✅ <a id="user-content-r0s14" href="#r0s14">FluentValidation.Tests.DefaultValidatorExtensionTester</a>
|
||||
### ✅ <a id="user-content-r0s14" href="#user-content-r0s14">FluentValidation.Tests.DefaultValidatorExtensionTester</a>
|
||||
```
|
||||
✅ Empty_should_create_EmptyValidator
|
||||
✅ Equal_should_create_EqualValidator_with_explicit_value
|
||||
@@ -310,7 +315,7 @@
|
||||
✅ ScalePrecision_should_create_ScalePrecisionValidator
|
||||
✅ ScalePrecision_should_create_ScalePrecisionValidator_with_ignore_trailing_zeros
|
||||
```
|
||||
### ✅ <a id="user-content-r0s15" href="#r0s15">FluentValidation.Tests.EmailValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s15" href="#user-content-r0s15">FluentValidation.Tests.EmailValidatorTests</a>
|
||||
```
|
||||
✅ Fails_email_validation_aspnetcore_compatible(email: " \r \t \n")
|
||||
✅ Fails_email_validation_aspnetcore_compatible(email: "")
|
||||
@@ -349,7 +354,7 @@
|
||||
✅ Valid_email_addresses_regex(email: "testperson+label@gmail.com")
|
||||
✅ Valid_email_addresses_regex(email: null)
|
||||
```
|
||||
### ✅ <a id="user-content-r0s16" href="#r0s16">FluentValidation.Tests.EmptyTester</a>
|
||||
### ✅ <a id="user-content-r0s16" href="#user-content-r0s16">FluentValidation.Tests.EmptyTester</a>
|
||||
```
|
||||
✅ Passes_for_ienumerable_that_doesnt_implement_ICollection
|
||||
✅ Passes_when_collection_empty
|
||||
@@ -361,7 +366,7 @@
|
||||
✅ When_value_is_null_validator_should_pass
|
||||
✅ When_value_is_whitespace_validation_should_pass
|
||||
```
|
||||
### ✅ <a id="user-content-r0s17" href="#r0s17">FluentValidation.Tests.EnumValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s17" href="#user-content-r0s17">FluentValidation.Tests.EnumValidatorTests</a>
|
||||
```
|
||||
✅ Flags_enum_invalid_when_using_outofrange_negative_value
|
||||
✅ Flags_enum_invalid_when_using_outofrange_positive_value
|
||||
@@ -376,7 +381,7 @@
|
||||
✅ When_the_enum_is_not_initialized_with_valid_value_then_the_validator_should_fail
|
||||
✅ When_validation_fails_the_default_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s18" href="#r0s18">FluentValidation.Tests.EqualValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s18" href="#user-content-r0s18">FluentValidation.Tests.EqualValidatorTests</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Should_store_comparison_type
|
||||
@@ -389,7 +394,7 @@
|
||||
✅ When_the_objects_are_not_equal_validation_should_fail
|
||||
✅ When_validation_fails_the_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s19" href="#r0s19">FluentValidation.Tests.ExactLengthValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s19" href="#user-content-r0s19">FluentValidation.Tests.ExactLengthValidatorTester</a>
|
||||
```
|
||||
✅ Min_and_max_properties_should_be_set
|
||||
✅ When_exact_length_rule_failes_error_should_have_exact_length_error_errorcode
|
||||
@@ -398,7 +403,7 @@
|
||||
✅ When_the_text_length_is_smaller_the_validator_should_fail
|
||||
✅ When_the_validator_fails_the_error_message_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s20" href="#r0s20">FluentValidation.Tests.ExclusiveBetweenValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s20" href="#user-content-r0s20">FluentValidation.Tests.ExclusiveBetweenValidatorTests</a>
|
||||
```
|
||||
✅ To_and_from_properties_should_be_set
|
||||
✅ To_and_from_properties_should_be_set_for_dates
|
||||
@@ -420,14 +425,14 @@
|
||||
✅ When_the_value_is_smaller_than_the_range_then_the_validator_should_fail
|
||||
✅ When_the_value_is_smaller_than_the_range_then_the_validator_should_fail_for_strings
|
||||
```
|
||||
### ✅ <a id="user-content-r0s21" href="#r0s21">FluentValidation.Tests.ExtensionTester</a>
|
||||
### ✅ <a id="user-content-r0s21" href="#user-content-r0s21">FluentValidation.Tests.ExtensionTester</a>
|
||||
```
|
||||
✅ Should_extract_member_from_member_expression
|
||||
✅ Should_return_null_for_non_member_expressions
|
||||
✅ Should_split_pascal_cased_member_name
|
||||
✅ SplitPascalCase_should_return_null_when_input_is_null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s22" href="#r0s22">FluentValidation.Tests.ForEachRuleTests</a>
|
||||
### ✅ <a id="user-content-r0s22" href="#user-content-r0s22">FluentValidation.Tests.ForEachRuleTests</a>
|
||||
```
|
||||
✅ Async_condition_should_work_with_child_collection
|
||||
✅ Can_access_colletion_index
|
||||
@@ -464,7 +469,7 @@
|
||||
✅ When_runs_outside_RuleForEach_loop
|
||||
✅ When_runs_outside_RuleForEach_loop_async
|
||||
```
|
||||
### ✅ <a id="user-content-r0s23" href="#r0s23">FluentValidation.Tests.GreaterThanOrEqualToValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s23" href="#user-content-r0s23">FluentValidation.Tests.GreaterThanOrEqualToValidatorTester</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Comparison_type
|
||||
@@ -481,7 +486,7 @@
|
||||
✅ Validates_with_nullable_when_property_not_null_cross_property
|
||||
✅ Validates_with_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s24" href="#r0s24">FluentValidation.Tests.GreaterThanValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s24" href="#user-content-r0s24">FluentValidation.Tests.GreaterThanValidatorTester</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Comparison_Type
|
||||
@@ -497,7 +502,7 @@
|
||||
✅ Validates_with_nullable_when_property_not_null_cross_property
|
||||
✅ Validates_with_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s25" href="#r0s25">FluentValidation.Tests.InclusiveBetweenValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s25" href="#user-content-r0s25">FluentValidation.Tests.InclusiveBetweenValidatorTests</a>
|
||||
```
|
||||
✅ To_and_from_properties_should_be_set
|
||||
✅ To_and_from_properties_should_be_set_for_strings
|
||||
@@ -518,7 +523,7 @@
|
||||
✅ When_the_value_is_smaller_than_the_range_then_the_validator_should_fail
|
||||
✅ When_the_value_is_smaller_than_the_range_then_the_validator_should_fail_for_strings
|
||||
```
|
||||
### ✅ <a id="user-content-r0s26" href="#r0s26">FluentValidation.Tests.InheritanceValidatorTest</a>
|
||||
### ✅ <a id="user-content-r0s26" href="#user-content-r0s26">FluentValidation.Tests.InheritanceValidatorTest</a>
|
||||
```
|
||||
✅ Can_use_custom_subclass_with_nongeneric_overload
|
||||
✅ Validates_collection
|
||||
@@ -532,11 +537,11 @@
|
||||
✅ Validates_with_callback_accepting_derived_async
|
||||
✅ Validates_with_callback_async
|
||||
```
|
||||
### ✅ <a id="user-content-r0s27" href="#r0s27">FluentValidation.Tests.InlineValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s27" href="#user-content-r0s27">FluentValidation.Tests.InlineValidatorTester</a>
|
||||
```
|
||||
✅ Uses_inline_validator_to_build_rules
|
||||
```
|
||||
### ✅ <a id="user-content-r0s28" href="#r0s28">FluentValidation.Tests.LanguageManagerTests</a>
|
||||
### ✅ <a id="user-content-r0s28" href="#user-content-r0s28">FluentValidation.Tests.LanguageManagerTests</a>
|
||||
```
|
||||
✅ All_languages_should_be_loaded
|
||||
✅ All_localizations_have_same_parameters_as_English
|
||||
@@ -560,7 +565,7 @@
|
||||
✅ Gets_translation_for_specific_culture
|
||||
✅ Uses_error_code_as_localization_key
|
||||
```
|
||||
### ✅ <a id="user-content-r0s29" href="#r0s29">FluentValidation.Tests.LengthValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s29" href="#user-content-r0s29">FluentValidation.Tests.LengthValidatorTests</a>
|
||||
```
|
||||
✅ Min_and_max_properties_should_be_set
|
||||
✅ When_input_is_null_then_the_validator_should_pass
|
||||
@@ -579,7 +584,7 @@
|
||||
✅ When_the_text_is_smaller_than_the_range_then_the_validator_should_fail
|
||||
✅ When_the_validator_fails_the_error_message_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s30" href="#r0s30">FluentValidation.Tests.LessThanOrEqualToValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s30" href="#user-content-r0s30">FluentValidation.Tests.LessThanOrEqualToValidatorTester</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Comparison_type
|
||||
@@ -595,7 +600,7 @@
|
||||
✅ Validates_with_nullable_when_property_not_null_cross_property
|
||||
✅ Validates_with_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s31" href="#r0s31">FluentValidation.Tests.LessThanValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s31" href="#user-content-r0s31">FluentValidation.Tests.LessThanValidatorTester</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Comparison_type
|
||||
@@ -614,7 +619,7 @@
|
||||
✅ Validates_with_nullable_when_property_not_null_cross_property
|
||||
✅ Validates_with_nullable_when_property_null_cross_property
|
||||
```
|
||||
### ✅ <a id="user-content-r0s32" href="#r0s32">FluentValidation.Tests.LocalisedMessagesTester</a>
|
||||
### ✅ <a id="user-content-r0s32" href="#user-content-r0s32">FluentValidation.Tests.LocalisedMessagesTester</a>
|
||||
```
|
||||
✅ Correctly_assigns_default_localized_error_message
|
||||
✅ Does_not_throw_InvalidCastException_when_using_RuleForEach
|
||||
@@ -623,12 +628,12 @@
|
||||
✅ Uses_func_to_get_message
|
||||
✅ Uses_string_format_with_property_value
|
||||
```
|
||||
### ✅ <a id="user-content-r0s33" href="#r0s33">FluentValidation.Tests.LocalisedNameTester</a>
|
||||
### ✅ <a id="user-content-r0s33" href="#user-content-r0s33">FluentValidation.Tests.LocalisedNameTester</a>
|
||||
```
|
||||
✅ Uses_localized_name
|
||||
✅ Uses_localized_name_expression
|
||||
```
|
||||
### ✅ <a id="user-content-r0s34" href="#r0s34">FluentValidation.Tests.MemberAccessorTests</a>
|
||||
### ✅ <a id="user-content-r0s34" href="#user-content-r0s34">FluentValidation.Tests.MemberAccessorTests</a>
|
||||
```
|
||||
✅ ComplexPropertyGet
|
||||
✅ ComplexPropertySet
|
||||
@@ -640,7 +645,7 @@
|
||||
✅ SimplePropertyGet
|
||||
✅ SimplePropertySet
|
||||
```
|
||||
### ✅ <a id="user-content-r0s35" href="#r0s35">FluentValidation.Tests.MessageFormatterTests</a>
|
||||
### ✅ <a id="user-content-r0s35" href="#user-content-r0s35">FluentValidation.Tests.MessageFormatterTests</a>
|
||||
```
|
||||
✅ Adds_argument_and_custom_arguments
|
||||
✅ Adds_formatted_argument_and_custom_arguments
|
||||
@@ -653,18 +658,18 @@
|
||||
✅ Understands_date_formats
|
||||
✅ Understands_numeric_formats
|
||||
```
|
||||
### ✅ <a id="user-content-r0s36" href="#r0s36">FluentValidation.Tests.ModelLevelValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s36" href="#user-content-r0s36">FluentValidation.Tests.ModelLevelValidatorTests</a>
|
||||
```
|
||||
✅ Can_use_child_validator_at_model_level
|
||||
✅ Validates_at_model_level
|
||||
```
|
||||
### ✅ <a id="user-content-r0s37" href="#r0s37">FluentValidation.Tests.NameResolutionPluggabilityTester</a>
|
||||
### ✅ <a id="user-content-r0s37" href="#user-content-r0s37">FluentValidation.Tests.NameResolutionPluggabilityTester</a>
|
||||
```
|
||||
✅ Resolves_nested_properties
|
||||
✅ ShouldHaveValidationError_Should_support_custom_propertynameresolver
|
||||
✅ Uses_custom_property_name
|
||||
```
|
||||
### ✅ <a id="user-content-r0s38" href="#r0s38">FluentValidation.Tests.NotEmptyTester</a>
|
||||
### ✅ <a id="user-content-r0s38" href="#user-content-r0s38">FluentValidation.Tests.NotEmptyTester</a>
|
||||
```
|
||||
✅ Fails_for_array
|
||||
✅ Fails_for_ienumerable_that_doesnt_implement_ICollection
|
||||
@@ -677,7 +682,7 @@
|
||||
✅ When_value_is_null_validator_should_fail
|
||||
✅ When_value_is_whitespace_validation_should_fail
|
||||
```
|
||||
### ✅ <a id="user-content-r0s39" href="#r0s39">FluentValidation.Tests.NotEqualValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s39" href="#user-content-r0s39">FluentValidation.Tests.NotEqualValidatorTests</a>
|
||||
```
|
||||
✅ Comparison_property_uses_custom_resolver
|
||||
✅ Should_handle_custom_value_types_correctly
|
||||
@@ -691,7 +696,7 @@
|
||||
✅ When_the_objects_are_not_equal_then_the_validator_should_pass
|
||||
✅ When_the_validator_fails_the_error_message_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s40" href="#r0s40">FluentValidation.Tests.NotNullTester</a>
|
||||
### ✅ <a id="user-content-r0s40" href="#user-content-r0s40">FluentValidation.Tests.NotNullTester</a>
|
||||
```
|
||||
✅ Fails_when_nullable_value_type_is_null
|
||||
✅ Not_null_validator_should_not_crash_with_non_nullable_value_type
|
||||
@@ -699,7 +704,7 @@
|
||||
✅ NotNullValidator_should_pass_if_value_has_value
|
||||
✅ When_the_validator_fails_the_error_message_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s41" href="#r0s41">FluentValidation.Tests.NullTester</a>
|
||||
### ✅ <a id="user-content-r0s41" href="#user-content-r0s41">FluentValidation.Tests.NullTester</a>
|
||||
```
|
||||
✅ Not_null_validator_should_not_crash_with_non_nullable_value_type
|
||||
✅ NullValidator_should_fail_if_value_has_value
|
||||
@@ -707,7 +712,7 @@
|
||||
✅ Passes_when_nullable_value_type_is_null
|
||||
✅ When_the_validator_passes_the_error_message_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s42" href="#r0s42">FluentValidation.Tests.OnFailureTests</a>
|
||||
### ✅ <a id="user-content-r0s42" href="#user-content-r0s42">FluentValidation.Tests.OnFailureTests</a>
|
||||
```
|
||||
✅ OnFailure_called_for_each_failed_rule
|
||||
✅ OnFailure_called_for_each_failed_rule_asyncAsync
|
||||
@@ -720,7 +725,7 @@
|
||||
✅ WhenWithOnFailure_should_invoke_condition_on_async_inner_validator
|
||||
✅ WhenWithOnFailure_should_invoke_condition_on_inner_validator
|
||||
```
|
||||
### ✅ <a id="user-content-r0s43" href="#r0s43">FluentValidation.Tests.PredicateValidatorTester</a>
|
||||
### ✅ <a id="user-content-r0s43" href="#user-content-r0s43">FluentValidation.Tests.PredicateValidatorTester</a>
|
||||
```
|
||||
✅ Should_fail_when_predicate_returns_false
|
||||
✅ Should_succeed_when_predicate_returns_true
|
||||
@@ -728,7 +733,7 @@
|
||||
✅ When_validation_fails_metadata_should_be_set_on_failure
|
||||
✅ When_validation_fails_the_default_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s44" href="#r0s44">FluentValidation.Tests.PropertyChainTests</a>
|
||||
### ✅ <a id="user-content-r0s44" href="#user-content-r0s44">FluentValidation.Tests.PropertyChainTests</a>
|
||||
```
|
||||
✅ AddIndexer_throws_when_nothing_added
|
||||
✅ Calling_ToString_should_construct_string_representation_of_chain
|
||||
@@ -738,7 +743,7 @@
|
||||
✅ Should_ignore_blanks
|
||||
✅ Should_not_be_subchain
|
||||
```
|
||||
### ✅ <a id="user-content-r0s45" href="#r0s45">FluentValidation.Tests.RegularExpressionValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s45" href="#user-content-r0s45">FluentValidation.Tests.RegularExpressionValidatorTests</a>
|
||||
```
|
||||
✅ Can_access_expression_in_message
|
||||
✅ Can_access_expression_in_message_lambda
|
||||
@@ -756,7 +761,7 @@
|
||||
✅ When_the_text_matches_the_regular_expression_then_the_validator_should_pass
|
||||
✅ When_validation_fails_the_default_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s46" href="#r0s46">FluentValidation.Tests.RuleBuilderTests</a>
|
||||
### ✅ <a id="user-content-r0s46" href="#user-content-r0s46">FluentValidation.Tests.RuleBuilderTests</a>
|
||||
```
|
||||
✅ Adding_a_validator_should_return_builder
|
||||
✅ Adding_a_validator_should_store_validator
|
||||
@@ -788,7 +793,7 @@
|
||||
✅ Should_throw_when_inverse_predicate_is_null
|
||||
✅ Should_throw_when_predicate_is_null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s47" href="#r0s47">FluentValidation.Tests.RuleDependencyTests</a>
|
||||
### ✅ <a id="user-content-r0s47" href="#user-content-r0s47">FluentValidation.Tests.RuleDependencyTests</a>
|
||||
```
|
||||
✅ Async_inside_dependent_rules
|
||||
✅ Async_inside_dependent_rules_when_parent_rule_not_async
|
||||
@@ -805,7 +810,7 @@
|
||||
✅ TestAsyncWithDependentRules_SyncEntry
|
||||
✅ Treats_root_level_RuleFor_call_as_dependent_rule_if_user_forgets_to_use_DependentRulesBuilder
|
||||
```
|
||||
### ✅ <a id="user-content-r0s48" href="#r0s48">FluentValidation.Tests.RulesetTests</a>
|
||||
### ✅ <a id="user-content-r0s48" href="#user-content-r0s48">FluentValidation.Tests.RulesetTests</a>
|
||||
```
|
||||
✅ Applies_multiple_rulesets_to_rule
|
||||
✅ Combines_rulesets_and_explicit_properties
|
||||
@@ -829,7 +834,7 @@
|
||||
✅ Trims_spaces
|
||||
✅ WithMessage_works_inside_rulesets
|
||||
```
|
||||
### ✅ <a id="user-content-r0s49" href="#r0s49">FluentValidation.Tests.ScalePrecisionValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s49" href="#user-content-r0s49">FluentValidation.Tests.ScalePrecisionValidatorTests</a>
|
||||
```
|
||||
✅ Scale_precision_should_be_valid
|
||||
✅ Scale_precision_should_be_valid_when_ignoring_trailing_zeroes
|
||||
@@ -838,7 +843,7 @@
|
||||
✅ Scale_precision_should_not_be_valid_when_ignoring_trailing_zeroes
|
||||
✅ Scale_precision_should_not_be_valid_when_they_are_equal
|
||||
```
|
||||
### ✅ <a id="user-content-r0s50" href="#r0s50">FluentValidation.Tests.SharedConditionTests</a>
|
||||
### ✅ <a id="user-content-r0s50" href="#user-content-r0s50">FluentValidation.Tests.SharedConditionTests</a>
|
||||
```
|
||||
✅ Async_condition_can_be_used_inside_ruleset
|
||||
✅ Condition_can_be_used_inside_ruleset
|
||||
@@ -883,11 +888,11 @@
|
||||
✅ When_condition_only_executed_once
|
||||
✅ WhenAsync_condition_only_executed_once
|
||||
```
|
||||
### ✅ <a id="user-content-r0s51" href="#r0s51">FluentValidation.Tests.StandalonePropertyValidationTester</a>
|
||||
### ✅ <a id="user-content-r0s51" href="#user-content-r0s51">FluentValidation.Tests.StandalonePropertyValidationTester</a>
|
||||
```
|
||||
✅ Should_validate_property_value_without_instance
|
||||
```
|
||||
### ✅ <a id="user-content-r0s52" href="#r0s52">FluentValidation.Tests.StringEnumValidatorTests</a>
|
||||
### ✅ <a id="user-content-r0s52" href="#user-content-r0s52">FluentValidation.Tests.StringEnumValidatorTests</a>
|
||||
```
|
||||
✅ IsValidTests_CaseInsensitive_CaseCorrect
|
||||
✅ IsValidTests_CaseInsensitive_CaseIncorrect
|
||||
@@ -900,20 +905,20 @@
|
||||
✅ When_the_property_is_initialized_with_null_then_the_validator_should_be_valid
|
||||
✅ When_validation_fails_the_default_error_should_be_set
|
||||
```
|
||||
### ✅ <a id="user-content-r0s53" href="#r0s53">FluentValidation.Tests.TrackingCollectionTests</a>
|
||||
### ✅ <a id="user-content-r0s53" href="#user-content-r0s53">FluentValidation.Tests.TrackingCollectionTests</a>
|
||||
```
|
||||
✅ Add_AddsItem
|
||||
✅ Should_not_raise_event_once_handler_detached
|
||||
✅ When_Item_Added_Raises_ItemAdded
|
||||
```
|
||||
### ✅ <a id="user-content-r0s54" href="#r0s54">FluentValidation.Tests.TransformTests</a>
|
||||
### ✅ <a id="user-content-r0s54" href="#user-content-r0s54">FluentValidation.Tests.TransformTests</a>
|
||||
```
|
||||
✅ Transforms_collection_element
|
||||
✅ Transforms_collection_element_async
|
||||
✅ Transforms_property_value
|
||||
✅ Transforms_property_value_to_another_type
|
||||
```
|
||||
### ✅ <a id="user-content-r0s55" href="#r0s55">FluentValidation.Tests.UserSeverityTester</a>
|
||||
### ✅ <a id="user-content-r0s55" href="#user-content-r0s55">FluentValidation.Tests.UserSeverityTester</a>
|
||||
```
|
||||
✅ Can_Provide_conditional_severity
|
||||
✅ Can_Provide_severity_for_item_in_collection
|
||||
@@ -923,14 +928,14 @@
|
||||
✅ Stores_user_severity_against_validation_failure
|
||||
✅ Throws_when_provider_is_null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s56" href="#r0s56">FluentValidation.Tests.UserStateTester</a>
|
||||
### ✅ <a id="user-content-r0s56" href="#user-content-r0s56">FluentValidation.Tests.UserStateTester</a>
|
||||
```
|
||||
✅ Can_Provide_state_for_item_in_collection
|
||||
✅ Correctly_provides_object_being_validated
|
||||
✅ Stores_user_state_against_validation_failure
|
||||
✅ Throws_when_provider_is_null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s57" href="#r0s57">FluentValidation.Tests.ValidateAndThrowTester</a>
|
||||
### ✅ <a id="user-content-r0s57" href="#user-content-r0s57">FluentValidation.Tests.ValidateAndThrowTester</a>
|
||||
```
|
||||
✅ Does_not_throw_when_valid
|
||||
✅ Does_not_throw_when_valid_and_a_ruleset
|
||||
@@ -947,7 +952,7 @@
|
||||
✅ ValidationException_provides_correct_message_when_appendDefaultMessage_false
|
||||
✅ ValidationException_provides_correct_message_when_appendDefaultMessage_true
|
||||
```
|
||||
### ✅ <a id="user-content-r0s58" href="#r0s58">FluentValidation.Tests.ValidationResultTests</a>
|
||||
### ✅ <a id="user-content-r0s58" href="#user-content-r0s58">FluentValidation.Tests.ValidationResultTests</a>
|
||||
```
|
||||
✅ Can_serialize_failure
|
||||
✅ Can_serialize_result
|
||||
@@ -958,7 +963,7 @@
|
||||
✅ ToString_return_error_messages_with_given_separator
|
||||
✅ ToString_return_error_messages_with_newline_as_separator
|
||||
```
|
||||
### ✅ <a id="user-content-r0s59" href="#r0s59">FluentValidation.Tests.ValidatorDescriptorTester</a>
|
||||
### ✅ <a id="user-content-r0s59" href="#user-content-r0s59">FluentValidation.Tests.ValidatorDescriptorTester</a>
|
||||
```
|
||||
✅ Does_not_throw_when_rule_declared_without_property
|
||||
✅ Gets_validators_for_property
|
||||
@@ -966,7 +971,7 @@
|
||||
✅ Returns_empty_collection_for_property_with_no_validators
|
||||
✅ Should_retrieve_name_given_to_it_pass_property_as_string
|
||||
```
|
||||
### ✅ <a id="user-content-r0s60" href="#r0s60">FluentValidation.Tests.ValidatorSelectorTests</a>
|
||||
### ✅ <a id="user-content-r0s60" href="#user-content-r0s60">FluentValidation.Tests.ValidatorSelectorTests</a>
|
||||
```
|
||||
✅ Can_use_property_with_include
|
||||
✅ Does_not_validate_other_property
|
||||
@@ -979,7 +984,7 @@
|
||||
✅ Validates_nullable_property_with_overriden_name_when_selected
|
||||
✅ Validates_property_using_expression
|
||||
```
|
||||
### ✅ <a id="user-content-r0s61" href="#r0s61">FluentValidation.Tests.ValidatorTesterTester</a>
|
||||
### ✅ <a id="user-content-r0s61" href="#user-content-r0s61">FluentValidation.Tests.ValidatorTesterTester</a>
|
||||
```
|
||||
✅ Allows_only_one_failure_to_match
|
||||
✅ Can_use_indexer_in_string_message
|
||||
@@ -1054,4 +1059,5 @@
|
||||
✅ Unexpected_message_check(withoutErrMsg: "bar", errMessages: [])
|
||||
✅ Unexpected_severity_check
|
||||
✅ Unexpected_state_check
|
||||
```
|
||||
```
|
||||
</details>
|
||||
38
__tests__/__outputs__/golang-json.md
Normal file
@@ -0,0 +1,38 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/golang-json.json](#user-content-r0)|5 ✅|6 ❌|1 ⚪|6s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/golang-json.json</a>
|
||||
**12** tests were completed in **6s** with **5** passed, **6** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[_/home/james_t/git/test-reporter/reports/go](#user-content-r0s0)|5 ✅|6 ❌|1 ⚪|6s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">_/home/james_t/git/test-reporter/reports/go</a>
|
||||
```
|
||||
✅ TestPassing
|
||||
❌ TestFailing
|
||||
calculator_test.go:19: expected 1+1 = 3, got 2
|
||||
|
||||
❌ TestPanicInsideFunction
|
||||
calculator_test.go:76: caught panic: runtime error: integer divide by zero
|
||||
|
||||
❌ TestPanicInsideTest
|
||||
calculator_test.go:76: caught panic: bad stuff
|
||||
|
||||
⚪ TestSkipped
|
||||
calculator_test.go:45: skipping test
|
||||
|
||||
❌ TestCases
|
||||
|
||||
TestCases
|
||||
✅ 1_+_2_=_3
|
||||
✅ 4_+_7_=_11
|
||||
❌ 2_+_3_=_4
|
||||
calculator_test.go:67: expected 2 + 3 = 4, got 5
|
||||
|
||||
❌ 1_/_2_=_1
|
||||
calculator_test.go:67: expected 1 / 2 = 1, got 0
|
||||
|
||||
✅ 9_/_3_=_3
|
||||
✅ 14_/_7_=_2
|
||||
```
|
||||
17
__tests__/__outputs__/jest-junit-eslint.md
Normal file
@@ -0,0 +1,17 @@
|
||||

|
||||
<details><summary>Expand for details</summary>
|
||||
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/jest-junit-eslint.xml](#user-content-r0)|1 ✅|||0ms|
|
||||
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/jest-junit-eslint.xml</a>
|
||||
**1** tests were completed in **0ms** with **1** passed, **0** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[test.jsx](#user-content-r0s0)|1 ✅|||0ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">test.jsx</a>
|
||||
```
|
||||
test
|
||||
✅ test.jsx
|
||||
```
|
||||
</details>
|
||||
@@ -1,11 +1,14 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/jest-junit.xml</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/jest-junit.xml](#user-content-r0)|1 ✅|4 ❌|1 ⚪|1s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/jest-junit.xml</a>
|
||||
**6** tests were completed in **1s** with **1** passed, **4** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[__tests__\main.test.js](#r0s0)|1✅|3❌||486ms|
|
||||
|[__tests__\second.test.js](#r0s1)||1❌|1⚪|82ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#r0s0">__tests__\main.test.js</a>
|
||||
|[__tests__\main.test.js](#user-content-r0s0)|1 ✅|3 ❌||486ms|
|
||||
|[__tests__\second.test.js](#user-content-r0s1)||1 ❌|1 ⚪|82ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">__tests__\main.test.js</a>
|
||||
```
|
||||
Test 1
|
||||
✅ Passing test
|
||||
@@ -18,7 +21,7 @@ Test 2
|
||||
❌ Exception in test
|
||||
Error: Some error
|
||||
```
|
||||
### ❌ <a id="user-content-r0s1" href="#r0s1">__tests__\second.test.js</a>
|
||||
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">__tests__\second.test.js</a>
|
||||
```
|
||||
❌ Timeout test
|
||||
: Timeout - Async callback was not invoked within the 1 ms timeout specified by jest.setTimeout.Timeout - Async callback was not invoked within the 1 ms timeout specified by jest.setTimeout.Error:
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||

|
||||
## ✅ <a id="user-content-r0" href="#r0">fixtures/external/jest/jest-react-component-test-results.xml</a>
|
||||
<details><summary>Expand for details</summary>
|
||||
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/jest/jest-react-component-test-results.xml](#user-content-r0)|1 ✅|||1000ms|
|
||||
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/jest/jest-react-component-test-results.xml</a>
|
||||
**1** tests were completed in **1000ms** with **1** passed, **0** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[\<Component /\>](#r0s0)|1✅|||798ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#r0s0">\<Component /\></a>
|
||||
|[\<Component /\>](#user-content-r0s0)|1 ✅|||798ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">\<Component /\></a>
|
||||
```
|
||||
✅ <Component /> should render properly
|
||||
```
|
||||
```
|
||||
</details>
|
||||
File diff suppressed because it is too large
23
__tests__/__outputs__/junit-basic.md
Normal file
@@ -0,0 +1,23 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/java/junit4-basic.xml](#user-content-r0)|5 ✅|1 ❌||16s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-basic.xml</a>
|
||||
**6** tests were completed in **16s** with **5** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|
||||
|[Tests.Registration](#user-content-r0s1)|3 ✅|||7s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
|
||||
```
|
||||
✅ testCase7
|
||||
✅ testCase8
|
||||
❌ testCase9
|
||||
AssertionError: Assertion error message
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Registration</a>
|
||||
```
|
||||
✅ testCase1
|
||||
✅ testCase2
|
||||
✅ testCase3
|
||||
```
|
||||
22
__tests__/__outputs__/junit-complete.md
Normal file
@@ -0,0 +1,22 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/java/junit4-complete.xml](#user-content-r0)|5 ✅|2 ❌|1 ⚪|16s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/junit4-complete.xml</a>
|
||||
**8** tests were completed in **16s** with **5** passed, **2** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[Tests.Registration](#user-content-r0s0)|5 ✅|2 ❌|1 ⚪|16s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Registration</a>
|
||||
```
|
||||
✅ testCase1
|
||||
✅ testCase2
|
||||
✅ testCase3
|
||||
⚪ testCase4
|
||||
❌ testCase5
|
||||
AssertionError: Expected value did not match.
|
||||
❌ testCase6
|
||||
ArithmeticError: Division by zero.
|
||||
✅ testCase7
|
||||
✅ testCase8
|
||||
```
|
||||
15
__tests__/__outputs__/junit-with-message.md
Normal file
@@ -0,0 +1,15 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/junit-with-message.xml](#user-content-r0)||1 ❌||1ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/junit-with-message.xml</a>
|
||||
**1** tests were completed in **1ms** with **0** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[Test](#user-content-r0s0)||1 ❌||1ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Test</a>
|
||||
```
|
||||
Fails
|
||||
❌ Test
|
||||
error.cpp:01
|
||||
```
|
||||
@@ -1,11 +1,14 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/mocha-json.json</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/mocha-json.json](#user-content-r0)|1 ✅|4 ❌|1 ⚪|12ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/mocha-json.json</a>
|
||||
**6** tests were completed in **12ms** with **1** passed, **4** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[test/main.test.js](#r0s0)|1✅|3❌||1ms|
|
||||
|[test/second.test.js](#r0s1)||1❌|1⚪|8ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#r0s0">test/main.test.js</a>
|
||||
|[test/main.test.js](#user-content-r0s0)|1 ✅|3 ❌||1ms|
|
||||
|[test/second.test.js](#user-content-r0s1)||1 ❌|1 ⚪|8ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">test/main.test.js</a>
|
||||
```
|
||||
Test 1
|
||||
✅ Passing test
|
||||
@@ -21,7 +24,7 @@ Test 2
|
||||
❌ Exception in test
|
||||
Some error
|
||||
```
|
||||
### ❌ <a id="user-content-r0s1" href="#r0s1">test/second.test.js</a>
|
||||
### ❌ <a id="user-content-r0s1" href="#user-content-r0s1">test/second.test.js</a>
|
||||
```
|
||||
⚪ Skipped test
|
||||
❌ Timeout test
|
||||
|
||||
File diff suppressed because it is too large
30
__tests__/__outputs__/phpunit-junit-basic-results.md
Normal file
@@ -0,0 +1,30 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/phpunit/junit-basic.xml](#user-content-r0)|8 ✅|1 ❌||16s|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/junit-basic.xml</a>
|
||||
**9** tests were completed in **16s** with **8** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[Tests.Authentication](#user-content-r0s0)|2 ✅|1 ❌||9s|
|
||||
|[Tests.Authentication.Login](#user-content-r0s1)|3 ✅|||4s|
|
||||
|[Tests.Registration](#user-content-r0s2)|3 ✅|||7s|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">Tests.Authentication</a>
|
||||
```
|
||||
✅ testCase7
|
||||
✅ testCase8
|
||||
❌ testCase9
|
||||
AssertionError: Assertion error message
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">Tests.Authentication.Login</a>
|
||||
```
|
||||
✅ testCase4
|
||||
✅ testCase5
|
||||
✅ testCase6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">Tests.Registration</a>
|
||||
```
|
||||
✅ testCase1
|
||||
✅ testCase2
|
||||
✅ testCase3
|
||||
```
|
||||
88
__tests__/__outputs__/phpunit-phpcheckstyle-results.md
Normal file
@@ -0,0 +1,88 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/phpunit/phpcheckstyle-phpunit.xml](#user-content-r0)|28 ✅|2 ❌||41ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/phpunit/phpcheckstyle-phpunit.xml</a>
|
||||
**30** tests were completed in **41ms** with **28** passed, **2** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[CommentsTest](#user-content-r0s0)|3 ✅|||7ms|
|
||||
|[DeprecationTest](#user-content-r0s1)|1 ✅|||1ms|
|
||||
|[GoodTest](#user-content-r0s2)|4 ✅|||5ms|
|
||||
|[IndentationTest](#user-content-r0s3)|8 ✅|||8ms|
|
||||
|[MetricsTest](#user-content-r0s4)|1 ✅|||4ms|
|
||||
|[NamingTest](#user-content-r0s5)|2 ✅|||3ms|
|
||||
|[OptimizationTest](#user-content-r0s6)|1 ✅|||1ms|
|
||||
|[OtherTest](#user-content-r0s7)|2 ✅|2 ❌||7ms|
|
||||
|[PHPTagsTest](#user-content-r0s8)|2 ✅|||1ms|
|
||||
|[ProhibitedTest](#user-content-r0s9)|1 ✅|||1ms|
|
||||
|[StrictCompareTest](#user-content-r0s10)|1 ✅|||2ms|
|
||||
|[UnusedTest](#user-content-r0s11)|2 ✅|||2ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">CommentsTest</a>
|
||||
```
|
||||
✅ testGoodDoc
|
||||
✅ testComments
|
||||
✅ testTODOs
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">DeprecationTest</a>
|
||||
```
|
||||
✅ testDeprecations
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">GoodTest</a>
|
||||
```
|
||||
✅ testGood
|
||||
✅ testDoWhile
|
||||
✅ testAnonymousFunction
|
||||
✅ testException
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">IndentationTest</a>
|
||||
```
|
||||
✅ testTabIndentation
|
||||
✅ testSpaceIndentation
|
||||
✅ testSpaceIndentationArray
|
||||
✅ testGoodSpaceIndentationArray
|
||||
✅ testGoodIndentationNewLine
|
||||
✅ testGoodIndentationSpaces
|
||||
✅ testBadSpaces
|
||||
✅ testBadSpaceAfterControl
|
||||
```
|
||||
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">MetricsTest</a>
|
||||
```
|
||||
✅ testMetrics
|
||||
```
|
||||
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">NamingTest</a>
|
||||
```
|
||||
✅ testNaming
|
||||
✅ testFunctionNaming
|
||||
```
|
||||
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">OptimizationTest</a>
|
||||
```
|
||||
✅ testTextAfterClosingTag
|
||||
```
|
||||
### ❌ <a id="user-content-r0s7" href="#user-content-r0s7">OtherTest</a>
|
||||
```
|
||||
❌ testOther
|
||||
PHPUnit\Framework\ExpectationFailedException
|
||||
❌ testException
|
||||
PHPUnit\Framework\ExpectationFailedException
|
||||
✅ testEmpty
|
||||
✅ testSwitchCaseNeedBreak
|
||||
```
|
||||
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">PHPTagsTest</a>
|
||||
```
|
||||
✅ testTextAfterClosingTag
|
||||
✅ testClosingTagNotNeeded
|
||||
```
|
||||
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">ProhibitedTest</a>
|
||||
```
|
||||
✅ testProhibited
|
||||
```
|
||||
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">StrictCompareTest</a>
|
||||
```
|
||||
✅ testStrictCompare
|
||||
```
|
||||
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">UnusedTest</a>
|
||||
```
|
||||
✅ testGoodUnused
|
||||
✅ testBadUnused
|
||||
```
|
||||
41
__tests__/__outputs__/phpunit-test-results.md
Normal file
@@ -0,0 +1,41 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/phpunit/phpunit.xml](#user-content-r0)|10 ✅|2 ❌||148ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/phpunit/phpunit.xml</a>
|
||||
**12** tests were completed in **148ms** with **10** passed, **2** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[CLI Arguments](#user-content-r0s0)||2 ❌||140ms|
|
||||
|[PHPUnit\Event\CollectingDispatcherTest](#user-content-r0s1)|2 ✅|||4ms|
|
||||
|[PHPUnit\Event\DeferringDispatcherTest](#user-content-r0s2)|4 ✅|||3ms|
|
||||
|[PHPUnit\Event\DirectDispatcherTest](#user-content-r0s3)|4 ✅|||1ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">CLI Arguments</a>
|
||||
```
|
||||
❌ targeting-traits-with-coversclass-attribute-is-deprecated.phpt
|
||||
PHPUnit\Framework\PhptAssertionFailedError
|
||||
❌ targeting-traits-with-usesclass-attribute-is-deprecated.phpt
|
||||
PHPUnit\Framework\PhptAssertionFailedError
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">PHPUnit\Event\CollectingDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.CollectingDispatcherTest
|
||||
✅ testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation
|
||||
✅ testCollectsDispatchedEventsUntilFlushed
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">PHPUnit\Event\DeferringDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.DeferringDispatcherTest
|
||||
✅ testCollectsEventsUntilFlush
|
||||
✅ testFlushesCollectedEvents
|
||||
✅ testSubscriberCanBeRegistered
|
||||
✅ testTracerCanBeRegistered
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">PHPUnit\Event\DirectDispatcherTest</a>
|
||||
```
|
||||
PHPUnit.Event.DirectDispatcherTest
|
||||
✅ testDispatchesEventToKnownSubscribers
|
||||
✅ testDispatchesEventToTracers
|
||||
✅ testRegisterRejectsUnknownSubscriber
|
||||
✅ testDispatchRejectsUnknownEventType
|
||||
```
|
||||
@@ -1,25 +1,28 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/external/flutter/provider-test-results.json</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/flutter/provider-test-results.json](#user-content-r0)|268 ✅|1 ❌||0ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/flutter/provider-test-results.json</a>
|
||||
**269** tests were completed in **0ms** with **268** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[test/builder_test.dart](#r0s0)|24✅|||402ms|
|
||||
|[test/change_notifier_provider_test.dart](#r0s1)|10✅|||306ms|
|
||||
|[test/consumer_test.dart](#r0s2)|18✅|||340ms|
|
||||
|[test/context_test.dart](#r0s3)|31✅|||698ms|
|
||||
|[test/future_provider_test.dart](#r0s4)|10✅|||305ms|
|
||||
|[test/inherited_provider_test.dart](#r0s5)|81✅|||1s|
|
||||
|[test/listenable_provider_test.dart](#r0s6)|16✅|||353ms|
|
||||
|[test/listenable_proxy_provider_test.dart](#r0s7)|12✅|||373ms|
|
||||
|[test/multi_provider_test.dart](#r0s8)|3✅|||198ms|
|
||||
|[test/provider_test.dart](#r0s9)|11✅|||306ms|
|
||||
|[test/proxy_provider_test.dart](#r0s10)|16✅|||438ms|
|
||||
|[test/reassemble_test.dart](#r0s11)|3✅|||221ms|
|
||||
|[test/selector_test.dart](#r0s12)|17✅|||364ms|
|
||||
|[test/stateful_provider_test.dart](#r0s13)|4✅|||254ms|
|
||||
|[test/stream_provider_test.dart](#r0s14)|8✅|||282ms|
|
||||
|[test/value_listenable_provider_test.dart](#r0s15)|4✅|1❌||327ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#r0s0">test/builder_test.dart</a>
|
||||
|[test/builder_test.dart](#user-content-r0s0)|24 ✅|||402ms|
|
||||
|[test/change_notifier_provider_test.dart](#user-content-r0s1)|10 ✅|||306ms|
|
||||
|[test/consumer_test.dart](#user-content-r0s2)|18 ✅|||340ms|
|
||||
|[test/context_test.dart](#user-content-r0s3)|31 ✅|||698ms|
|
||||
|[test/future_provider_test.dart](#user-content-r0s4)|10 ✅|||305ms|
|
||||
|[test/inherited_provider_test.dart](#user-content-r0s5)|81 ✅|||1s|
|
||||
|[test/listenable_provider_test.dart](#user-content-r0s6)|16 ✅|||353ms|
|
||||
|[test/listenable_proxy_provider_test.dart](#user-content-r0s7)|12 ✅|||373ms|
|
||||
|[test/multi_provider_test.dart](#user-content-r0s8)|3 ✅|||198ms|
|
||||
|[test/provider_test.dart](#user-content-r0s9)|11 ✅|||306ms|
|
||||
|[test/proxy_provider_test.dart](#user-content-r0s10)|16 ✅|||438ms|
|
||||
|[test/reassemble_test.dart](#user-content-r0s11)|3 ✅|||221ms|
|
||||
|[test/selector_test.dart](#user-content-r0s12)|17 ✅|||364ms|
|
||||
|[test/stateful_provider_test.dart](#user-content-r0s13)|4 ✅|||254ms|
|
||||
|[test/stream_provider_test.dart](#user-content-r0s14)|8 ✅|||282ms|
|
||||
|[test/value_listenable_provider_test.dart](#user-content-r0s15)|4 ✅|1 ❌||327ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">test/builder_test.dart</a>
|
||||
```
|
||||
ChangeNotifierProvider
|
||||
✅ default
|
||||
@@ -51,7 +54,7 @@ MultiProvider
|
||||
✅ with ProxyProvider5
|
||||
✅ with ProxyProvider6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#r0s1">test/change_notifier_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">test/change_notifier_provider_test.dart</a>
|
||||
```
|
||||
✅ Use builder property, not child
|
||||
ChangeNotifierProvider
|
||||
@@ -65,7 +68,7 @@ ChangeNotifierProvider
|
||||
✅ builder6
|
||||
✅ builder0
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#r0s2">test/consumer_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">test/consumer_test.dart</a>
|
||||
```
|
||||
consumer
|
||||
✅ obtains value from Provider<T>
|
||||
@@ -92,7 +95,7 @@ consumer6
|
||||
✅ crashed with no builder
|
||||
✅ can be used inside MultiProvider
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#r0s3">test/context_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">test/context_test.dart</a>
|
||||
```
|
||||
✅ watch in layoutbuilder
|
||||
✅ select in layoutbuilder
|
||||
@@ -127,7 +130,7 @@ BuildContext
|
||||
✅ context.select deeply compares sets
|
||||
✅ context.watch listens to value changes
|
||||
```
|
||||
### ✅ <a id="user-content-r0s4" href="#r0s4">test/future_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">test/future_provider_test.dart</a>
|
||||
```
|
||||
✅ works with MultiProvider
|
||||
✅ (catchError) previous future completes after transition is no-op
|
||||
@@ -141,7 +144,7 @@ BuildContext
|
||||
FutureProvider()
|
||||
✅ crashes if builder is null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s5" href="#r0s5">test/inherited_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">test/inherited_provider_test.dart</a>
|
||||
```
|
||||
✅ regression test #377
|
||||
✅ rebuild on dependency flags update
|
||||
@@ -230,7 +233,7 @@ DeferredInheritedProvider()
|
||||
✅ dispose
|
||||
✅ dispose no-op if never built
|
||||
```
|
||||
### ✅ <a id="user-content-r0s6" href="#r0s6">test/listenable_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">test/listenable_provider_test.dart</a>
|
||||
```
|
||||
ListenableProvider
|
||||
✅ works with MultiProvider
|
||||
@@ -252,7 +255,7 @@ ListenableProvider stateful constructor
|
||||
✅ pass down key
|
||||
✅ throws if create is null
|
||||
```
|
||||
### ✅ <a id="user-content-r0s7" href="#r0s7">test/listenable_proxy_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s7" href="#user-content-r0s7">test/listenable_proxy_provider_test.dart</a>
|
||||
```
|
||||
ListenableProxyProvider
|
||||
✅ throws if update is missing
|
||||
@@ -269,14 +272,14 @@ ListenableProxyProvider variants
|
||||
✅ ListenableProxyProvider5
|
||||
✅ ListenableProxyProvider6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s8" href="#r0s8">test/multi_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">test/multi_provider_test.dart</a>
|
||||
```
|
||||
MultiProvider
|
||||
✅ throw if providers is null
|
||||
✅ MultiProvider children can only access parent providers
|
||||
✅ MultiProvider.providers with ignored child
|
||||
```
|
||||
### ✅ <a id="user-content-r0s9" href="#r0s9">test/provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">test/provider_test.dart</a>
|
||||
```
|
||||
✅ works with MultiProvider
|
||||
Provider.of
|
||||
@@ -292,7 +295,7 @@ Provider
|
||||
✅ throws an error if no provider found
|
||||
✅ update should notify
|
||||
```
|
||||
### ✅ <a id="user-content-r0s10" href="#r0s10">test/proxy_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">test/proxy_provider_test.dart</a>
|
||||
```
|
||||
ProxyProvider
|
||||
✅ throws if the provided value is a Listenable/Stream
|
||||
@@ -313,13 +316,13 @@ ProxyProvider variants
|
||||
✅ ProxyProvider5
|
||||
✅ ProxyProvider6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s11" href="#r0s11">test/reassemble_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">test/reassemble_test.dart</a>
|
||||
```
|
||||
✅ ReassembleHandler
|
||||
✅ unevaluated create
|
||||
✅ unevaluated create
|
||||
```
|
||||
### ✅ <a id="user-content-r0s12" href="#r0s12">test/selector_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s12" href="#user-content-r0s12">test/selector_test.dart</a>
|
||||
```
|
||||
✅ asserts that builder/selector are not null
|
||||
✅ Deep compare maps by default
|
||||
@@ -339,14 +342,14 @@ ProxyProvider variants
|
||||
✅ Selector5
|
||||
✅ Selector6
|
||||
```
|
||||
### ✅ <a id="user-content-r0s13" href="#r0s13">test/stateful_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s13" href="#user-content-r0s13">test/stateful_provider_test.dart</a>
|
||||
```
|
||||
✅ asserts
|
||||
✅ works with MultiProvider
|
||||
✅ calls create only once
|
||||
✅ dispose
|
||||
```
|
||||
### ✅ <a id="user-content-r0s14" href="#r0s14">test/stream_provider_test.dart</a>
|
||||
### ✅ <a id="user-content-r0s14" href="#user-content-r0s14">test/stream_provider_test.dart</a>
|
||||
```
|
||||
✅ works with MultiProvider
|
||||
✅ transition from stream to stream preserve state
|
||||
@@ -358,7 +361,7 @@ StreamProvider()
|
||||
✅ create and dispose stream with builder
|
||||
✅ crashes if builder is null
|
||||
```
|
||||
### ❌ <a id="user-content-r0s15" href="#r0s15">test/value_listenable_provider_test.dart</a>
|
||||
### ❌ <a id="user-content-r0s15" href="#user-content-r0s15">test/value_listenable_provider_test.dart</a>
|
||||
```
|
||||
valueListenableProvider
|
||||
✅ rebuilds when value change
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml](#user-content-r0)||1 ❌|1 ⚪|116ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/external/java/TEST-org.apache.pulsar.AddMissingPatchVersionTest.xml</a>
|
||||
**2** tests were completed in **116ms** with **0** passed, **1** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[org.apache.pulsar.AddMissingPatchVersionTest](#r0s0)||1❌|1⚪|116ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#r0s0">org.apache.pulsar.AddMissingPatchVersionTest</a>
|
||||
|[org.apache.pulsar.AddMissingPatchVersionTest](#user-content-r0s0)||1 ❌|1 ⚪|116ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">org.apache.pulsar.AddMissingPatchVersionTest</a>
|
||||
```
|
||||
⚪ testVersionStrings
|
||||
❌ testVersionStrings
|
||||
|
||||
File diff suppressed because it is too large
26
__tests__/__outputs__/python-xunit-pytest.md
Normal file
@@ -0,0 +1,26 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/python-xunit-pytest.xml](#user-content-r0)|6 ✅|2 ❌|2 ⚪|19ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-pytest.xml</a>
|
||||
**10** tests were completed in **19ms** with **6** passed, **2** failed and **2** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[pytest](#user-content-r0s0)|6 ✅|2 ❌|2 ⚪|19ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">pytest</a>
|
||||
```
|
||||
tests.test_lib
|
||||
✅ test_always_pass
|
||||
✅ test_with_subtests
|
||||
✅ test_parameterized[param1]
|
||||
✅ test_parameterized[param2]
|
||||
⚪ test_always_skip
|
||||
❌ test_always_fail
|
||||
assert False
|
||||
⚪ test_expected_failure
|
||||
❌ test_error
|
||||
Exception: error
|
||||
✅ test_with_record_property
|
||||
custom_classname
|
||||
✅ test_with_record_xml_attribute
|
||||
```
|
||||
23
__tests__/__outputs__/python-xunit-unittest.md
Normal file
@@ -0,0 +1,23 @@
|
||||

|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/python-xunit-unittest.xml](#user-content-r0)|4 ✅|2 ❌|2 ⚪|1ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/python-xunit-unittest.xml</a>
|
||||
**8** tests were completed in **1ms** with **4** passed, **2** failed and **2** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[TestAcme-20251114214921](#user-content-r0s0)|4 ✅|2 ❌|2 ⚪|1ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestAcme-20251114214921</a>
|
||||
```
|
||||
TestAcme
|
||||
✅ test_always_pass
|
||||
✅ test_parameterized_0_param1
|
||||
✅ test_parameterized_1_param2
|
||||
✅ test_with_subtests
|
||||
❌ test_always_fail
|
||||
AssertionError: failed
|
||||
❌ test_error
|
||||
Exception: error
|
||||
⚪ test_always_skip
|
||||
⚪ test_expected_failure
|
||||
```
|
||||
@@ -1,10 +1,13 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/rspec-json.json</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/rspec-json.json](#user-content-r0)|1 ✅|1 ❌|1 ⚪|0ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/rspec-json.json</a>
|
||||
**3** tests were completed in **0ms** with **1** passed, **1** failed and **1** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[./spec/config/check_env_vars_spec.rb](#r0s0)|1✅|1❌|1⚪|0ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#r0s0">./spec/config/check_env_vars_spec.rb</a>
|
||||
|[./spec/config/check_env_vars_spec.rb](#user-content-r0s0)|1 ✅|1 ❌|1 ⚪|0ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">./spec/config/check_env_vars_spec.rb</a>
|
||||
```
|
||||
CheckEnvVars#call when all env vars are defined behaves like success load
|
||||
❌ CheckEnvVars#call when all env vars are defined behaves like success load fails in assertion
|
||||
|
||||
@@ -1,22 +1,27 @@
|
||||

|
||||
## ✅ <a id="user-content-r0" href="#r0">fixtures/external/SilentNotes.trx</a>
|
||||
<details><summary>Expand for details</summary>
|
||||
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/external/SilentNotes.trx](#user-content-r0)|67 ✅||12 ⚪|1s|
|
||||
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/external/SilentNotes.trx</a>
|
||||
**79** tests were completed in **1s** with **67** passed, **0** failed and **12** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageCredentialsTest](#r0s0)|6✅|||30ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudStorageClientTest](#r0s1)|2✅||3⚪|101ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorageClientTest](#r0s2)|4✅||3⚪|166ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorageClientTest](#r0s3)|2✅|||7ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStorageClientTest](#r0s4)|1✅||3⚪|40ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudStorageClientTest](#r0s5)|1✅||3⚪|15ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStorageClientTest](#r0s6)|5✅|||16ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageTokenTest](#r0s7)|9✅|||0ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest](#r0s8)|3✅|||3ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest](#r0s9)|9✅|||12ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest](#r0s10)|5✅|||13ms|
|
||||
|[VanillaCloudStorageClientTest.SecureStringExtensionsTest](#r0s11)|7✅|||0ms|
|
||||
|[VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsTest](#r0s12)|13✅|||43ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#r0s0">VanillaCloudStorageClientTest.CloudStorageCredentialsTest</a>
|
||||
|[VanillaCloudStorageClientTest.CloudStorageCredentialsTest](#user-content-r0s0)|6 ✅|||30ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudStorageClientTest](#user-content-r0s1)|2 ✅||3 ⚪|101ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorageClientTest](#user-content-r0s2)|4 ✅||3 ⚪|166ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorageClientTest](#user-content-r0s3)|2 ✅|||7ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStorageClientTest](#user-content-r0s4)|1 ✅||3 ⚪|40ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudStorageClientTest](#user-content-r0s5)|1 ✅||3 ⚪|15ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStorageClientTest](#user-content-r0s6)|5 ✅|||16ms|
|
||||
|[VanillaCloudStorageClientTest.CloudStorageTokenTest](#user-content-r0s7)|9 ✅|||0ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest](#user-content-r0s8)|3 ✅|||3ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest](#user-content-r0s9)|9 ✅|||12ms|
|
||||
|[VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest](#user-content-r0s10)|5 ✅|||13ms|
|
||||
|[VanillaCloudStorageClientTest.SecureStringExtensionsTest](#user-content-r0s11)|7 ✅|||0ms|
|
||||
|[VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsTest](#user-content-r0s12)|13 ✅|||43ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">VanillaCloudStorageClientTest.CloudStorageCredentialsTest</a>
|
||||
```
|
||||
✅ AreEqualWorksWithDifferentPassword
|
||||
✅ AreEqualWorksWithSameContent
|
||||
@@ -25,7 +30,7 @@
|
||||
✅ ValidateAcceptsValidCredentials
|
||||
✅ ValidateRejectsInvalidCredentials
|
||||
```
|
||||
### ✅ <a id="user-content-r0s1" href="#r0s1">VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s1" href="#user-content-r0s1">VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudStorageClientTest</a>
|
||||
```
|
||||
✅ FileLifecycleWorks
|
||||
⚪ ReallyDoFetchToken
|
||||
@@ -33,7 +38,7 @@
|
||||
⚪ ReallyDoRefreshToken
|
||||
✅ ThrowsAccessDeniedExceptionWithInvalidToken
|
||||
```
|
||||
### ✅ <a id="user-content-r0s2" href="#r0s2">VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s2" href="#user-content-r0s2">VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorageClientTest</a>
|
||||
```
|
||||
✅ FileLifecycleWorks
|
||||
✅ SanitizeCredentials_ChangesInvalidPrefix
|
||||
@@ -43,26 +48,26 @@
|
||||
⚪ ThrowsWithInvalidUrl
|
||||
⚪ ThrowsWithInvalidUsername
|
||||
```
|
||||
### ✅ <a id="user-content-r0s3" href="#r0s3">VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s3" href="#user-content-r0s3">VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorageClientTest</a>
|
||||
```
|
||||
✅ ChoosesCorrectUrlForGmxComEmail
|
||||
✅ ChoosesCorrectUrlForGmxNetEmail
|
||||
```
|
||||
### ✅ <a id="user-content-r0s4" href="#r0s4">VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s4" href="#user-content-r0s4">VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStorageClientTest</a>
|
||||
```
|
||||
✅ FileLifecycleWorks
|
||||
⚪ ReallyDoFetchToken
|
||||
⚪ ReallyDoOpenAuthorizationPageInBrowser
|
||||
⚪ ReallyDoRefreshToken
|
||||
```
|
||||
### ✅ <a id="user-content-r0s5" href="#r0s5">VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s5" href="#user-content-r0s5">VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudStorageClientTest</a>
|
||||
```
|
||||
✅ FileLifecycleWorks
|
||||
⚪ ReallyDoFetchToken
|
||||
⚪ ReallyDoOpenAuthorizationPageInBrowser
|
||||
⚪ ReallyDoRefreshToken
|
||||
```
|
||||
### ✅ <a id="user-content-r0s6" href="#r0s6">VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s6" href="#user-content-r0s6">VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStorageClientTest</a>
|
||||
```
|
||||
✅ FileLifecycleWorks
|
||||
✅ ParseGmxWebdavResponseCorrectly
|
||||
@@ -70,7 +75,7 @@
|
||||
✅ ThrowsWithInvalidPath
|
||||
✅ ThrowsWithInvalidUsername
|
||||
```
|
||||
### ✅ <a id="user-content-r0s7" href="#r0s7">VanillaCloudStorageClientTest.CloudStorageTokenTest</a>
|
||||
### ✅ <a id="user-content-r0s7" href="#user-content-r0s7">VanillaCloudStorageClientTest.CloudStorageTokenTest</a>
|
||||
```
|
||||
✅ AreEqualWorksWithNullDate
|
||||
✅ AreEqualWorksWithSameContent
|
||||
@@ -82,13 +87,13 @@
|
||||
✅ SetExpiryDateBySecondsWorksWithNull
|
||||
✅ SetExpiryDateBySecondsWorksWithVeryShortPeriod
|
||||
```
|
||||
### ✅ <a id="user-content-r0s8" href="#r0s8">VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest</a>
|
||||
### ✅ <a id="user-content-r0s8" href="#user-content-r0s8">VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest</a>
|
||||
```
|
||||
✅ ParsesAllErrorCodesCorrectly
|
||||
✅ ParsesNullErrorCodeCorrectly
|
||||
✅ ParsesUnknownErrorCodeCorrectly
|
||||
```
|
||||
### ✅ <a id="user-content-r0s9" href="#r0s9">VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest</a>
|
||||
### ✅ <a id="user-content-r0s9" href="#user-content-r0s9">VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest</a>
|
||||
```
|
||||
✅ BuildAuthorizationRequestUrlEscapesParameters
|
||||
✅ BuildAuthorizationRequestUrlLeavesOutOptionalParameters
|
||||
@@ -100,7 +105,7 @@
|
||||
✅ ParseRealWorldGoogleRejectResponse
|
||||
✅ ParseRealWorldGoogleSuccessResponse
|
||||
```
|
||||
### ✅ <a id="user-content-r0s10" href="#r0s10">VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest</a>
|
||||
### ✅ <a id="user-content-r0s10" href="#user-content-r0s10">VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest</a>
|
||||
```
|
||||
✅ BuildOAuth2AuthorizationRequestUrlWorks
|
||||
✅ FetchTokenCanInterpretGoogleResponse
|
||||
@@ -108,7 +113,7 @@
|
||||
✅ FetchTokenThrowsWithWrongState
|
||||
✅ RefreshTokenCanInterpretGoogleResponse
|
||||
```
|
||||
### ✅ <a id="user-content-r0s11" href="#r0s11">VanillaCloudStorageClientTest.SecureStringExtensionsTest</a>
|
||||
### ✅ <a id="user-content-r0s11" href="#user-content-r0s11">VanillaCloudStorageClientTest.SecureStringExtensionsTest</a>
|
||||
```
|
||||
✅ AreEqualsWorksCorrectly
|
||||
✅ CorrectlyConvertsSecureStringToString
|
||||
@@ -118,7 +123,7 @@
|
||||
✅ CorrectlyConvertsUnicodeBytesToSecureString
|
||||
✅ CorrectlyConvertsUtf8BytesToSecureString
|
||||
```
|
||||
### ✅ <a id="user-content-r0s12" href="#r0s12">VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsTest</a>
|
||||
### ✅ <a id="user-content-r0s12" href="#user-content-r0s12">VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsTest</a>
|
||||
```
|
||||
✅ DecryptAfterDesrializationCanReadAllPropertiesBack
|
||||
✅ DecryptAfterDesrializationRespectsNullProperties
|
||||
@@ -133,4 +138,5 @@
|
||||
✅ SerializedXmlCanBeReadBack
|
||||
✅ SerializedXmlDoesNotContainNullProperties
|
||||
✅ SerializedXmlDoesNotContainPlaintextData
|
||||
```
|
||||
```
|
||||
</details>
|
||||
@@ -1,13 +1,17 @@
|
||||

|
||||
## ❌ <a id="user-content-r0" href="#r0">fixtures/swift-xunit.xml</a>
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/swift-xunit.xml](#user-content-r0)|2 ✅|1 ❌||220ms|
|
||||
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/swift-xunit.xml</a>
|
||||
**3** tests were completed in **220ms** with **2** passed, **1** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[TestResults](#r0s0)|2✅|1❌||220ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#r0s0">TestResults</a>
|
||||
|[TestResults](#user-content-r0s0)|2 ✅|1 ❌||220ms|
|
||||
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">TestResults</a>
|
||||
```
|
||||
AcmeLibTests.AcmeLibTests
|
||||
✅ test_always_pass
|
||||
✅ test_always_skip
|
||||
❌ test_always_fail
|
||||
failed
|
||||
```
|
||||
20
__tests__/__outputs__/tester-bootstrap-test-results.md
Normal file
@@ -0,0 +1,20 @@
|
||||

|
||||
<details><summary>Expand for details</summary>
|
||||
|
||||
|Report|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[fixtures/nette-tester/BootstrapFormRenderer-report.xml](#user-content-r0)|4 ✅|||300ms|
|
||||
## ✅ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/BootstrapFormRenderer-report.xml</a>
|
||||
**4** tests were completed in **300ms** with **4** passed, **0** failed and **0** skipped.
|
||||
|Test suite|Passed|Failed|Skipped|Time|
|
||||
|:---|---:|---:|---:|---:|
|
||||
|[BootstrapFormRenderer-report.xml](#user-content-r0s0)|4 ✅|||300ms|
|
||||
### ✅ <a id="user-content-r0s0" href="#user-content-r0s0">BootstrapFormRenderer-report.xml</a>
|
||||
```
|
||||
KdybyTests/BootstrapFormRenderer
|
||||
✅ BootstrapRendererTest.phpt::testRenderingBasics
|
||||
✅ BootstrapRendererTest.phpt::testRenderingIndividual
|
||||
✅ BootstrapRendererTest.phpt::testRenderingComponents
|
||||
✅ BootstrapRendererTest.phpt::testMultipleFormsInTemplate
|
||||
```
|
||||
</details>
|
||||
87
__tests__/__outputs__/tester-v1.7-test-results.md
Normal file
@@ -0,0 +1,87 @@

|Report|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[fixtures/nette-tester/tester-v1.7-report.xml](#user-content-r0)|61 ✅|1 ❌|3 ⚪|2s|
## ❌ <a id="user-content-r0" href="#user-content-r0">fixtures/nette-tester/tester-v1.7-report.xml</a>
**65** tests were completed in **2s** with **61** passed, **1** failed and **3** skipped.
|Test suite|Passed|Failed|Skipped|Time|
|:---|---:|---:|---:|---:|
|[tester-v1.7-report.xml](#user-content-r0s0)|61 ✅|1 ❌|3 ⚪|2s|
### ❌ <a id="user-content-r0s0" href="#user-content-r0s0">tester-v1.7-report.xml</a>
```
tests/Framework
⚪ Dumper.toPhp.php7.phpt
✅ Assert.contains.phpt
✅ Assert.count.phpt
✅ Assert.equal.phpt
✅ Assert.equal.recursive.phpt::testSimple
✅ Assert.equal.recursive.phpt::testMultiple
✅ Assert.equal.recursive.phpt::testDeep
✅ Assert.equal.recursive.phpt::testCross
✅ Assert.equal.recursive.phpt::testThirdParty
✅ Assert.error.phpt
✅ Assert.exception.phpt
✅ Assert.false.phpt
✅ Assert.match.phpt
✅ Assert.match.regexp.phpt
✅ Assert.nan.phpt
✅ Assert.noError.phpt
✅ Assert.same.phpt
✅ Assert.null.phpt
✅ Assert.true.phpt
✅ Assert.truthy.phpt
✅ DataProvider.load.phpt
✅ Assert.type.phpt
✅ DataProvider.parseAnnotation.phpt
✅ DataProvider.testQuery.phpt
✅ DomQuery.css2Xpath.phpt
✅ DomQuery.fromHtml.phpt
✅ DomQuery.fromXml.phpt
✅ Dumper.dumpException.phpt
✅ Dumper.color.phpt
✅ Dumper.toLine.phpt
✅ Dumper.toPhp.recursion.phpt
✅ Dumper.toPhp.phpt
✅ FileMock.phpt
✅ Helpers.escapeArg.phpt
✅ Helpers.parseDocComment.phpt
✅ TestCase.annotationThrows.phpt
✅ TestCase.annotationThrows.setUp.tearDown.phpt
✅ TestCase.annotationThrows.syntax.phpt
✅ TestCase.basic.phpt
✅ TestCase.dataProvider.generator.phpt
✅ TestCase.dataProvider.phpt
✅ TestCase.invalidMethods.phpt
✅ TestCase.invalidProvider.phpt
✅ TestCase.order.error.phpt
✅ TestCase.order.errorMuted.phpt
✅ TestCase.order.phpt
✅ Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)
tests/CodeCoverage
⚪ Collector.start.phpt
✅ PhpParser.parse.lines.phpt
✅ PhpParser.parse.methods.phpt
✅ CloverXMLGenerator.phpt
✅ PhpParser.parse.edge.phpt
✅ PhpParser.parse.lines-of-code.phpt
✅ PhpParser.parse.namespaces.phpt
tests/Runner
✅ CommandLine.phpt
⚪ HhvmPhpInterpreter.phpt
✅ Runner.find-tests.phpt
✅ Job.phpt
✅ ZendPhpExecutable.phpt
✅ Runner.multiple.phpt
✅ Runner.edge.phpt
✅ Runner.stop-on-fail.phpt
❌ Runner.multiple-fails.phpt
Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'

diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'

in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()
✅ Runner.annotations.phpt
tests/RunnerOutput
✅ JUnitPrinter.phpt
```
@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`dart-json tests matches report snapshot 1`] = `
TestRunResult {
107
__tests__/__snapshots__/dotnet-nunit.test.ts.snap
Normal file
@@ -0,0 +1,107 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`dotnet-nunit tests report from ./reports/dotnet test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/dotnet-nunit.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "CalculatorTests",
"tests": [
TestCaseResult {
"error": undefined,
"name": "Is_Even_Number(2)",
"result": "success",
"time": 0.622,
},
TestCaseResult {
"error": {
"details": " at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.NUnitV3Tests\\CalculatorTests.cs:line 61
",
"line": undefined,
"message": " Expected: True
But was: False
",
"path": undefined,
},
"name": "Is_Even_Number(3)",
"result": "failed",
"time": 1.098,
},
TestCaseResult {
"error": {
"details": " at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.NUnitV3Tests\\CalculatorTests.cs:line 33",
"line": undefined,
"message": "System.DivideByZeroException : Attempted to divide by zero.",
"path": undefined,
},
"name": "Exception_In_TargetTest",
"result": "failed",
"time": 22.805,
},
TestCaseResult {
"error": {
"details": " at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.NUnitV3Tests\\CalculatorTests.cs:line 39",
"line": undefined,
"message": "System.Exception : Test",
"path": undefined,
},
"name": "Exception_In_Test",
"result": "failed",
"time": 0.528,
},
TestCaseResult {
"error": {
"details": " at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.NUnitV3Tests\\CalculatorTests.cs:line 27
",
"line": undefined,
"message": " Expected: 3
But was: 2
",
"path": undefined,
},
"name": "Failing_Test",
"result": "failed",
"time": 28.162,
},
TestCaseResult {
"error": undefined,
"name": "Passing_Test",
"result": "success",
"time": 0.23800000000000002,
},
TestCaseResult {
"error": undefined,
"name": "Passing_Test_With_Description",
"result": "success",
"time": 0.135,
},
TestCaseResult {
"error": undefined,
"name": "Skipped_Test",
"result": "skipped",
"time": 0.398,
},
TestCaseResult {
"error": {
"details": "",
"line": undefined,
"message": "",
"path": undefined,
},
"name": "Timeout_Test",
"result": "failed",
"time": 14.949,
},
],
},
],
"name": "DotnetTests.NUnitV3Tests.dll.DotnetTests.XUnitTests",
"totalTime": undefined,
},
],
"totalTime": 230.30800000000002,
}
`;
@@ -1,6 +1,6 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`dotnet-trx tests matches report snapshot 1`] = `
|
||||
exports[`dotnet-trx tests matches dotnet-trx report snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-trx.trx",
|
||||
"suites": [
|
||||
@@ -21,7 +21,9 @@ TestRunResult {
|
||||
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
|
||||
"line": 9,
|
||||
"message": "System.DivideByZeroException : Attempted to divide by zero.",
|
||||
"message": "System.DivideByZeroException : Attempted to divide by zero.
|
||||
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
|
||||
"path": "DotnetTests.Unit/Calculator.cs",
|
||||
},
|
||||
"name": "Exception_In_TargetTest",
|
||||
@@ -33,7 +35,8 @@ TestRunResult {
|
||||
"details": "System.Exception : Test
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
|
||||
"line": 39,
|
||||
"message": "System.Exception : Test",
|
||||
"message": "System.Exception : Test
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Exception_In_Test",
|
||||
@@ -49,7 +52,8 @@ Actual: 2
|
||||
"line": 27,
|
||||
"message": "Assert.Equal() Failure
|
||||
Expected: 3
|
||||
Actual: 2",
|
||||
Actual: 2
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Failing_Test",
|
||||
@@ -71,7 +75,8 @@ Actual: False
|
||||
"line": 59,
|
||||
"message": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False",
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Is_Even_Number(i: 3)",
|
||||
@@ -99,7 +104,213 @@ Actual: False
|
||||
"line": 67,
|
||||
"message": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False",
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Should be even number(i: 3)",
|
||||
"result": "failed",
|
||||
"time": 0.6537000000000001,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Skipped_Test",
|
||||
"result": "skipped",
|
||||
"time": 1,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Timeout_Test",
|
||||
"result": "success",
|
||||
"time": 108.42580000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DotnetTests.XUnitTests.CalculatorTests",
|
||||
"totalTime": undefined,
|
||||
},
|
||||
],
|
||||
"totalTime": 1116,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`dotnet-trx tests matches dotnet-xunitv3 report snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-xunitv3.trx",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": null,
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }
|
||||
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
|
||||
"line": 25,
|
||||
"message": "Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }
|
||||
at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)",
|
||||
"path": "DotnetTests.XUnitV3Tests/FixtureTests.cs",
|
||||
},
|
||||
"name": "Failing_Test",
|
||||
"result": "failed",
|
||||
"time": 17.0545,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Passing_Test",
|
||||
"result": "success",
|
||||
"time": 0.8786,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DotnetTests.XUnitV3Tests.FixtureTests",
|
||||
"totalTime": undefined,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": null,
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Unclassified",
|
||||
"totalTime": undefined,
|
||||
},
|
||||
],
|
||||
"totalTime": 267,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`dotnet-trx tests matches report snapshot (only failed tests) 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/dotnet-trx.trx",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": null,
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Custom Name",
|
||||
"result": "success",
|
||||
"time": 0.1371,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "System.DivideByZeroException : Attempted to divide by zero.
|
||||
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
|
||||
"line": 9,
|
||||
"message": "System.DivideByZeroException : Attempted to divide by zero.
|
||||
at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.Unit\\Calculator.cs:line 9
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 33",
|
||||
"path": "DotnetTests.Unit/Calculator.cs",
|
||||
},
|
||||
"name": "Exception_In_TargetTest",
|
||||
"result": "failed",
|
||||
"time": 0.8377,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "System.Exception : Test
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
|
||||
"line": 39,
|
||||
"message": "System.Exception : Test
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 39",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Exception_In_Test",
|
||||
"result": "failed",
|
||||
"time": 2.5175,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Assert.Equal() Failure
|
||||
Expected: 3
|
||||
Actual: 2
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
|
||||
"line": 27,
|
||||
"message": "Assert.Equal() Failure
|
||||
Expected: 3
|
||||
Actual: 2
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 27",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Failing_Test",
|
||||
"result": "failed",
|
||||
"time": 3.8697,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Is_Even_Number(i: 2)",
|
||||
"result": "success",
|
||||
"time": 0.0078,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
|
||||
"line": 59,
|
||||
"message": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 59",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Is_Even_Number(i: 3)",
|
||||
"result": "failed",
|
||||
"time": 0.41409999999999997,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Passing_Test",
|
||||
"result": "success",
|
||||
"time": 0.1365,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Should be even number(i: 2)",
|
||||
"result": "success",
|
||||
"time": 0.0097,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
|
||||
"line": 67,
|
||||
"message": "Assert.True() Failure
|
||||
Expected: True
|
||||
Actual: False
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTests.XUnitTests\\CalculatorTests.cs:line 67",
|
||||
"path": "DotnetTests.XUnitTests/CalculatorTests.cs",
|
||||
},
|
||||
"name": "Should be even number(i: 3)",
|
||||
|
||||
131
__tests__/__snapshots__/golang-json.test.ts.snap
Normal file
@@ -0,0 +1,131 @@
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`golang-json tests report from ./reports/dotnet test results matches snapshot 1`] = `
TestRunResult {
"path": "fixtures/golang-json.json",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": null,
"tests": [
TestCaseResult {
"error": undefined,
"name": "TestPassing",
"result": "success",
"time": 60,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:19: expected 1+1 = 3, got 2
",
"message": "calculator_test.go:19: expected 1+1 = 3, got 2
",
},
"name": "TestFailing",
"result": "failed",
"time": 890,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:76: caught panic: runtime error: integer divide by zero
",
"message": "calculator_test.go:76: caught panic: runtime error: integer divide by zero
",
},
"name": "TestPanicInsideFunction",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:76: caught panic: bad stuff
",
"message": "calculator_test.go:76: caught panic: bad stuff
",
},
"name": "TestPanicInsideTest",
"result": "failed",
"time": 0,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:45: skipping test
",
"message": "calculator_test.go:45: skipping test
",
},
"name": "TestSkipped",
"result": "skipped",
"time": 940,
},
TestCaseResult {
"error": {
"details": "",
"message": "",
},
"name": "TestCases",
"result": "failed",
"time": 2250,
},
],
},
TestGroupResult {
"name": "TestCases",
"tests": [
TestCaseResult {
"error": undefined,
"name": "1_+_2_=_3",
"result": "success",
"time": 400,
},
TestCaseResult {
"error": undefined,
"name": "4_+_7_=_11",
"result": "success",
"time": 460,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:67: expected 2 + 3 = 4, got 5
",
"message": "calculator_test.go:67: expected 2 + 3 = 4, got 5
",
},
"name": "2_+_3_=_4",
"result": "failed",
"time": 90,
},
TestCaseResult {
"error": {
"details": "calculator_test.go:67: expected 1 / 2 = 1, got 0
",
"message": "calculator_test.go:67: expected 1 / 2 = 1, got 0
",
},
"name": "1_/_2_=_1",
"result": "failed",
"time": 920,
},
TestCaseResult {
"error": undefined,
"name": "9_/_3_=_3",
"result": "success",
"time": 340,
},
TestCaseResult {
"error": undefined,
"name": "14_/_7_=_2",
"result": "success",
"time": 40,
},
],
},
],
"name": "_/home/james_t/git/test-reporter/reports/go",
"totalTime": undefined,
},
],
"totalTime": undefined,
}
`;
@@ -1,4 +1,4 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`java-junit tests report from apache/pulsar single suite test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
@@ -41,7 +41,7 @@ at java.lang.Thread.run(Thread.java:748)
|
||||
|
||||
",
|
||||
"line": 29,
|
||||
"message": undefined,
|
||||
"message": "java.lang.AssertionError: expected [1.2.1] but found [1.2.0]",
|
||||
"path": "pulsar-common/src/test/java/org/apache/pulsar/AddMissingPatchVersionTest.java",
|
||||
},
|
||||
"name": "testVersionStrings",
|
||||
@@ -100,7 +100,7 @@ at java.lang.Thread.run(Thread.java:748)
|
||||
|
||||
",
|
||||
"line": 29,
|
||||
"message": undefined,
|
||||
"message": "java.lang.AssertionError: expected [1.2.1] but found [1.2.0]",
|
||||
"path": "pulsar-common/src/test/java/org/apache/pulsar/AddMissingPatchVersionTest.java",
|
||||
},
|
||||
"name": "testVersionStrings",
|
||||
@@ -6878,3 +6878,153 @@ at java.lang.Thread.run(Thread.java:748)
|
||||
"totalTime": 2126531.0000000005,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml basic example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`java-junit tests report from testmo/junitxml complete example matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/java/junit4-complete.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2436,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1534,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 822,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Expected value did not match.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase5",
|
||||
"result": "failed",
|
||||
"time": 2902.412,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": undefined,
|
||||
"line": undefined,
|
||||
"message": "ArithmeticError: Division by zero.",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase6",
|
||||
"result": "failed",
|
||||
"time": 3819,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2944,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1625.275,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 16082.687,
|
||||
},
|
||||
],
|
||||
"totalTime": 16082.687,
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -1,4 +1,62 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`jest-junit tests parsing ESLint report without timing information works - PR #134 1`] = `
TestRunResult {
"path": "fixtures/jest-junit-eslint.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "test",
"tests": [
TestCaseResult {
"error": undefined,
"name": "test.jsx",
"result": "success",
"time": 0,
},
],
},
],
"name": "test.jsx",
"totalTime": 0,
},
],
"totalTime": undefined,
}
`;

exports[`jest-junit tests parsing junit report with message succeeds 1`] = `
TestRunResult {
"path": "fixtures/junit-with-message.xml",
"suites": [
TestSuiteResult {
"groups": [
TestGroupResult {
"name": "Fails",
"tests": [
TestCaseResult {
"error": {
"details": "error.cpp:01
Expected: true
Which is: false >",
"line": undefined,
"path": undefined,
},
"name": "Test",
"result": "failed",
"time": 0,
},
],
},
],
"name": "Test",
"totalTime": 1,
},
],
"totalTime": 1,
}
`;

exports[`jest-junit tests report from #235 testing react components named <ComponentName /> 1`] = `
TestRunResult {

@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`mocha-json tests report from ./reports/mocha-json test results matches snapshot 1`] = `
TestRunResult {
628
__tests__/__snapshots__/phpunit-junit.test.ts.snap
Normal file
@@ -0,0 +1,628 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`phpunit-junit tests report from junit-basic.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/junit-basic.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase1",
|
||||
"result": "success",
|
||||
"time": 2113.871,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase2",
|
||||
"result": "success",
|
||||
"time": 1051,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase3",
|
||||
"result": "success",
|
||||
"time": 3441,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Registration",
|
||||
"totalTime": 6605.870999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase4",
|
||||
"result": "success",
|
||||
"time": 2244,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase5",
|
||||
"result": "success",
|
||||
"time": 781,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase6",
|
||||
"result": "success",
|
||||
"time": 1331,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication.Login",
|
||||
"totalTime": 4356,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase7",
|
||||
"result": "success",
|
||||
"time": 2508,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCase8",
|
||||
"result": "success",
|
||||
"time": 1230.8159999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "",
|
||||
"line": undefined,
|
||||
"message": "AssertionError: Assertion error message",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testCase9",
|
||||
"result": "failed",
|
||||
"time": 982,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "Tests.Authentication",
|
||||
"totalTime": 9076.816,
|
||||
},
|
||||
],
|
||||
"totalTime": 15682.687,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpcheckstyle-phpunit.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/external/phpunit/phpcheckstyle-phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodDoc",
|
||||
"result": "success",
|
||||
"time": 5.093,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testComments",
|
||||
"result": "success",
|
||||
"time": 0.921,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTODOs",
|
||||
"result": "success",
|
||||
"time": 0.6880000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CommentsTest",
|
||||
"totalTime": 6.702,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDeprecations",
|
||||
"result": "success",
|
||||
"time": 0.9740000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "DeprecationTest",
|
||||
"totalTime": 0.9740000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGood",
|
||||
"result": "success",
|
||||
"time": 2.6470000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDoWhile",
|
||||
"result": "success",
|
||||
"time": 1.0219999999999998,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testAnonymousFunction",
|
||||
"result": "success",
|
||||
"time": 0.8,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testException",
|
||||
"result": "success",
|
||||
"time": 0.888,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "GoodTest",
|
||||
"totalTime": 5.357,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTabIndentation",
|
||||
"result": "success",
|
||||
"time": 0.857,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentation",
|
||||
"result": "success",
|
||||
"time": 0.929,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 0.975,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodSpaceIndentationArray",
|
||||
"result": "success",
|
||||
"time": 1.212,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationNewLine",
|
||||
"result": "success",
|
||||
"time": 0.859,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodIndentationSpaces",
|
||||
"result": "success",
|
||||
"time": 0.78,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaces",
|
||||
"result": "success",
|
||||
"time": 1.1199999999999999,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadSpaceAfterControl",
|
||||
"result": "success",
|
||||
"time": 0.9219999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "IndentationTest",
|
||||
"totalTime": 7.654,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testMetrics",
|
||||
"result": "success",
|
||||
"time": 4.146999999999999,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "MetricsTest",
|
||||
"totalTime": 4.146999999999999,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testNaming",
|
||||
"result": "success",
|
||||
"time": 1.426,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFunctionNaming",
|
||||
"result": "success",
|
||||
"time": 1.271,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "NamingTest",
|
||||
"totalTime": 2.697,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.9940000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OptimizationTest",
|
||||
"totalTime": 0.9940000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24",
|
||||
"line": 12,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testOther",
|
||||
"result": "failed",
|
||||
"time": 5.2509999999999994,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40",
|
||||
"line": 31,
|
||||
"message": "PHPUnit\\Framework\\ExpectationFailedException",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "testException",
|
||||
"result": "failed",
|
||||
"time": 0.751,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testEmpty",
|
||||
"result": "success",
|
||||
"time": 0.42700000000000005,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSwitchCaseNeedBreak",
|
||||
"result": "success",
|
||||
"time": 0.901,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "OtherTest",
|
||||
"totalTime": 7.329,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTextAfterClosingTag",
|
||||
"result": "success",
|
||||
"time": 0.641,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testClosingTagNotNeeded",
|
||||
"result": "success",
|
||||
"time": 0.631,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPTagsTest",
|
||||
"totalTime": 1.272,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testProhibited",
|
||||
"result": "success",
|
||||
"time": 0.9380000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "ProhibitedTest",
|
||||
"totalTime": 0.9380000000000001,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testStrictCompare",
|
||||
"result": "success",
|
||||
"time": 1.578,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "StrictCompareTest",
|
||||
"totalTime": 1.578,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testGoodUnused",
|
||||
"result": "success",
|
||||
"time": 0.94,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testBadUnused",
|
||||
"result": "success",
|
||||
"time": 0.895,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "UnusedTest",
|
||||
"totalTime": 1.835,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`phpunit-junit tests report from phpunit test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/phpunit/phpunit.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.CollectingDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation",
|
||||
"result": "success",
|
||||
"time": 1.441,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsDispatchedEventsUntilFlushed",
|
||||
"result": "success",
|
||||
"time": 2.815,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\CollectingDispatcherTest",
|
||||
"totalTime": 4.256,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DeferringDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testCollectsEventsUntilFlush",
|
||||
"result": "success",
|
||||
"time": 1.6720000000000002,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testFlushesCollectedEvents",
|
||||
"result": "success",
|
||||
"time": 0.661,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testSubscriberCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.33399999999999996,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testTracerCanBeRegistered",
|
||||
"result": "success",
|
||||
"time": 0.262,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DeferringDispatcherTest",
|
||||
"totalTime": 2.928,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "PHPUnit.Event.DirectDispatcherTest",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToKnownSubscribers",
|
||||
"result": "success",
|
||||
"time": 0.17,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchesEventToTracers",
|
||||
"result": "success",
|
||||
"time": 0.248,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testRegisterRejectsUnknownSubscriber",
|
||||
"result": "success",
|
||||
"time": 0.257,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "testDispatchRejectsUnknownEventType",
|
||||
"result": "success",
|
||||
"time": 0.11900000000000001,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "PHPUnit\\Event\\DirectDispatcherTest",
|
||||
"totalTime": 0.794,
|
||||
},
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-coversclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 68.151,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\\TestFixture\\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\\DeprecatedAnnotationsTestFixture\\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200",
|
||||
"line": undefined,
|
||||
"message": "PHPUnit\\Framework\\PhptAssertionFailedError",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "targeting-traits-with-usesclass-attribute-is-deprecated.phpt",
|
||||
"result": "failed",
|
||||
"time": 64.268,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "CLI Arguments",
|
||||
"totalTime": 140.397,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
192
__tests__/__snapshots__/python-xunit.test.ts.snap
Normal file
@@ -0,0 +1,192 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`python-xunit pytest report report from python test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/python-xunit-pytest.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "tests.test_lib",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_pass",
|
||||
"result": "success",
|
||||
"time": 2,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_subtests",
|
||||
"result": "success",
|
||||
"time": 5,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized[param1]",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized[param2]",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_skip",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "def test_always_fail():
|
||||
> assert False
|
||||
E assert False
|
||||
|
||||
tests/test_lib.py:25: AssertionError
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "assert False",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_always_fail",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_expected_failure",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "def test_error():
|
||||
> raise Exception("error")
|
||||
E Exception: error
|
||||
|
||||
tests/test_lib.py:32: Exception
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "Exception: error",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_error",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_record_property",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "custom_classname",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_record_xml_attribute",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "pytest",
|
||||
"totalTime": 19,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`python-xunit unittest report report from python test results matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/python-xunit-unittest.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "TestAcme",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_pass",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized_0_param1",
|
||||
"result": "success",
|
||||
"time": 1,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_parameterized_1_param2",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_with_subtests",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
|
||||
self.fail("failed")
|
||||
AssertionError: failed
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "AssertionError: failed",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_always_fail",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Traceback (most recent call last):
|
||||
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
|
||||
raise Exception("error")
|
||||
Exception: error
|
||||
",
|
||||
"line": undefined,
|
||||
"message": "Exception: error",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "test_error",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_always_skip",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "test_expected_failure",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "TestAcme-20251114214921",
|
||||
"totalTime": 1,
|
||||
},
|
||||
],
|
||||
"totalTime": 1,
|
||||
}
|
||||
`;
|
||||
@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`rspec-json tests report from ./reports/rspec-json test results matches snapshot 1`] = `
TestRunResult {

@@ -1,4 +1,4 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

exports[`swift-xunit tests report from swift test results matches snapshot 1`] = `
TestRunResult {
@@ -25,7 +25,7 @@ TestRunResult {
"error": {
"details": undefined,
"line": undefined,
"message": undefined,
"message": "failed",
"path": undefined,
},
"name": "test_always_fail",
485
__tests__/__snapshots__/tester-junit.test.ts.snap
Normal file
@@ -0,0 +1,485 @@
|
||||
// Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing
|
||||
|
||||
exports[`tester-junit tests parses complex test names from BootstrapFormRenderer-report.xml 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/BootstrapFormRenderer-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "KdybyTests/BootstrapFormRenderer",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingBasics",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingIndividual",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testRenderingComponents",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "BootstrapRendererTest.phpt::testMultipleFormsInTemplate",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "BootstrapFormRenderer-report.xml",
|
||||
"totalTime": 300,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`tester-junit tests report from tester-v1.7-report.xml matches snapshot 1`] = `
|
||||
TestRunResult {
|
||||
"path": "fixtures/nette-tester/tester-v1.7-report.xml",
|
||||
"suites": [
|
||||
TestSuiteResult {
|
||||
"groups": [
|
||||
TestGroupResult {
|
||||
"name": "tests/Framework",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.php7.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.contains.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.count.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testSimple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testMultiple",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testDeep",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testCross",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.equal.recursive.phpt::testThirdParty",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.exception.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.false.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.match.regexp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.nan.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.noError.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.same.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.null.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.true.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.truthy.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.load.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Assert.type.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.parseAnnotation.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DataProvider.testQuery.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.css2Xpath.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromHtml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "DomQuery.fromXml.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.dumpException.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.color.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.recursion.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Dumper.toPhp.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "FileMock.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.escapeArg.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Helpers.parseDocComment.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.setUp.tearDown.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.annotationThrows.syntax.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.basic.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.generator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.dataProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidMethods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.invalidProvider.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.error.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.errorMuted.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "TestCase.order.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Prevent loop in error handling. The #268 regression. (TestCase.ownErrorHandler.phpt)",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/CodeCoverage",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Collector.start.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.methods.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CloverXMLGenerator.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.lines-of-code.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "PhpParser.parse.namespaces.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/Runner",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "CommandLine.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "HhvmPhpInterpreter.phpt",
|
||||
"result": "skipped",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.find-tests.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Job.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "ZendPhpExecutable.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.multiple.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.edge.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.stop-on-fail.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": {
|
||||
"details": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"line": undefined,
|
||||
"message": "Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\\Assert::match()",
|
||||
"path": undefined,
|
||||
},
|
||||
"name": "Runner.multiple-fails.phpt",
|
||||
"result": "failed",
|
||||
"time": 0,
|
||||
},
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "Runner.annotations.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
TestGroupResult {
|
||||
"name": "tests/RunnerOutput",
|
||||
"tests": [
|
||||
TestCaseResult {
|
||||
"error": undefined,
|
||||
"name": "JUnitPrinter.phpt",
|
||||
"result": "success",
|
||||
"time": 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"name": "tester-v1.7-report.xml",
|
||||
"totalTime": 2100,
|
||||
},
|
||||
],
|
||||
"totalTime": undefined,
|
||||
}
|
||||
`;
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {DartJsonParser} from '../src/parsers/dart-json/dart-json-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('dart-json tests', () => {
|
||||
@@ -66,4 +66,66 @@ describe('dart-json tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dart-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DartJsonParser(opts, 'dart')
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dart-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DartJsonParser(opts, 'dart')
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dart-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DartJsonParser(opts, 'dart')
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
91
__tests__/dotnet-nunit.test.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {DotnetNunitParser} from '../src/parsers/dotnet-nunit/dotnet-nunit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('dotnet-nunit tests', () => {
|
||||
it('report from ./reports/dotnet test results matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-nunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'dotnet-nunit.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: ['DotnetTests.Unit/Calculator.cs', 'DotnetTests.NUnitV3Tests/CalculatorTests.cs']
|
||||
}
|
||||
|
||||
const parser = new DotnetNunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-nunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetNunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-nunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetNunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-nunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetNunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
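For reference, the option these dotnet-nunit tests exercise can be condensed into one small helper. This is an illustrative sketch, not part of the change: buildTitledReport is a hypothetical name, while DotnetNunitParser, ParseOptions, DEFAULT_OPTIONS and getReport are exactly the APIs used by the tests above.

import * as fs from 'fs'
import {DotnetNunitParser} from '../src/parsers/dotnet-nunit/dotnet-nunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'

// Hypothetical helper showing the reportTitle behaviour covered by the tests above:
// a blank or whitespace-only title leaves the status badge as the first line, any
// other value is rendered as a leading "# <title>" heading.
async function buildTitledReport(xmlPath: string, reportTitle: string): Promise<string> {
  const opts: ParseOptions = {parseErrors: true, trackedFiles: []}
  const parser = new DotnetNunitParser(opts)
  const result = await parser.parse(xmlPath, fs.readFileSync(xmlPath, {encoding: 'utf8'}))
  return getReport([result], {...DEFAULT_OPTIONS, reportTitle})
}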
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {DotnetTrxParser} from '../src/parsers/dotnet-trx/dotnet-trx-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport, ReportOptions} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('dotnet-trx tests', () => {
|
||||
@@ -23,9 +23,50 @@ describe('dotnet-trx tests', () => {
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('matches report snapshot', async () => {
|
||||
it('produces empty test run result when TestDefinitions is empty', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'dotnet-trx-empty-test-definitions.trx')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetTrxParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it.each([['dotnet-trx'], ['dotnet-xunitv3']])('matches %s report snapshot', async reportName => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', `${reportName}.trx`)
|
||||
const outputPath = path.join(__dirname, '__outputs__', `${reportName}.md`)
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: [
|
||||
'DotnetTests.Unit/Calculator.cs',
|
||||
'DotnetTests.XUnitTests/CalculatorTests.cs',
|
||||
'DotnetTests.XUnitV3Tests/FixtureTests.cs'
|
||||
]
|
||||
//workDir: 'C:/Users/Michal/Workspace/dorny/test-check/reports/dotnet/'
|
||||
}
|
||||
|
||||
const parser = new DotnetTrxParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('matches report snapshot (only failed tests)', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx.md')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'dotnet-trx-only-failed.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
@@ -39,7 +80,12 @@ describe('dotnet-trx tests', () => {
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
const reportOptions: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
listSuites: 'all',
|
||||
listTests: 'failed'
|
||||
}
|
||||
const report = getReport([result], reportOptions)
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
@@ -83,4 +129,66 @@ describe('dotnet-trx tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetTrxParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetTrxParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'dotnet-trx.trx')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new DotnetTrxParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
112
__tests__/fixtures/dotnet-nunit.xml
Normal file
@@ -0,0 +1,112 @@
|
||||
<?xml version="1.0" encoding="utf-8" standalone="no"?>
|
||||
<test-run id="0" runstate="Runnable" testcasecount="9" result="Failed" total="9" passed="3" failed="5" inconclusive="0" skipped="1" asserts="5" engine-version="3.12.0.0" clr-version="3.1.16" start-time="2021-06-28 20:23:41Z" end-time="2021-06-28 20:23:41Z" duration="0.230308">
|
||||
<command-line><![CDATA[C:\Users\Michal\.dotnet\tools\.store\nunit.consolerunner.netcore\3.12.0-beta2\nunit.consolerunner.netcore\3.12.0-beta2\tools\netcoreapp3.1\any\nunit3-console.dll reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll --result=__tests__/fixtures/dotnet-nunit.xml]]></command-line>
|
||||
<test-suite type="Assembly" id="1-1011" name="DotnetTests.NUnitV3Tests.dll" fullname="C:/Users/Michal/Workspace/dorny/test-reporter/reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll" runstate="Runnable" testcasecount="9" result="Failed" site="Child" start-time="2021-06-28T20:23:41.4594179Z" end-time="2021-06-28T20:23:41.5420313Z" duration="0.082553" total="9" passed="3" failed="5" warnings="0" inconclusive="0" skipped="1" asserts="5">
|
||||
<environment framework-version="3.13.2.0" clr-version="3.1.16" os-version="Microsoft Windows 10.0.19041" platform="Win32NT" cwd="C:\Users\Michal\Workspace\dorny\test-reporter" machine-name="DORNY-PC" user="Michal" user-domain="DORNY-PC" culture="sk-SK" uiculture="en-US" os-architecture="x64" />
|
||||
<settings>
|
||||
<setting name="DisposeRunners" value="True" />
|
||||
<setting name="WorkDirectory" value="C:\Users\Michal\Workspace\dorny\test-reporter" />
|
||||
<setting name="NumberOfTestWorkers" value="4" />
|
||||
</settings>
|
||||
<properties>
|
||||
<property name="_PID" value="30996" />
|
||||
<property name="_APPDOMAIN" value="nunit3-console" />
|
||||
</properties>
|
||||
<failure>
|
||||
<message><![CDATA[One or more child tests had errors]]></message>
|
||||
</failure>
|
||||
<test-suite type="TestSuite" id="1-1012" name="DotnetTests" fullname="DotnetTests" runstate="Runnable" testcasecount="9" result="Failed" site="Child" start-time="2021-06-28T20:23:41.4647482Z" end-time="2021-06-28T20:23:41.5420271Z" duration="0.077277" total="9" passed="3" failed="5" warnings="0" inconclusive="0" skipped="1" asserts="5">
|
||||
<failure>
|
||||
<message><![CDATA[One or more child tests had errors]]></message>
|
||||
</failure>
|
||||
<test-suite type="TestSuite" id="1-1013" name="XUnitTests" fullname="DotnetTests.XUnitTests" runstate="Runnable" testcasecount="9" result="Failed" site="Child" start-time="2021-06-28T20:23:41.4649710Z" end-time="2021-06-28T20:23:41.5420231Z" duration="0.077053" total="9" passed="3" failed="5" warnings="0" inconclusive="0" skipped="1" asserts="5">
|
||||
<failure>
|
||||
<message><![CDATA[One or more child tests had errors]]></message>
|
||||
</failure>
|
||||
<test-suite type="TestFixture" id="1-1000" name="CalculatorTests" fullname="DotnetTests.XUnitTests.CalculatorTests" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" testcasecount="9" result="Failed" site="Child" start-time="2021-06-28T20:23:41.4661195Z" end-time="2021-06-28T20:23:41.5420143Z" duration="0.075896" total="9" passed="3" failed="5" warnings="0" inconclusive="0" skipped="1" asserts="5">
|
||||
<failure>
|
||||
<message><![CDATA[One or more child tests had errors]]></message>
|
||||
</failure>
|
||||
<test-case id="1-1004" name="Exception_In_TargetTest" fullname="DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest" methodname="Exception_In_TargetTest" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="2033520428" result="Failed" label="Error" start-time="2021-06-28T20:23:41.4684284Z" end-time="2021-06-28T20:23:41.4911288Z" duration="0.022805" asserts="0">
|
||||
<failure>
|
||||
<message><![CDATA[System.DivideByZeroException : Attempted to divide by zero.]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9
|
||||
at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 33]]></stack-trace>
|
||||
</failure>
|
||||
</test-case>
|
||||
<test-case id="1-1005" name="Exception_In_Test" fullname="DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test" methodname="Exception_In_Test" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="145176317" result="Failed" label="Error" start-time="2021-06-28T20:23:41.4930398Z" end-time="2021-06-28T20:23:41.4935666Z" duration="0.000528" asserts="0">
|
||||
<failure>
|
||||
<message><![CDATA[System.Exception : Test]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 39]]></stack-trace>
|
||||
</failure>
|
||||
</test-case>
|
||||
<test-case id="1-1003" name="Failing_Test" fullname="DotnetTests.XUnitTests.CalculatorTests.Failing_Test" methodname="Failing_Test" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="189717168" result="Failed" start-time="2021-06-28T20:23:41.4935910Z" end-time="2021-06-28T20:23:41.5217516Z" duration="0.028162" asserts="1">
|
||||
<failure>
|
||||
<message><![CDATA[ Expected: 3
|
||||
But was: 2
|
||||
]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 27
|
||||
]]></stack-trace>
|
||||
</failure>
|
||||
<assertions>
|
||||
<assertion result="Failed">
|
||||
<message><![CDATA[ Expected: 3
|
||||
But was: 2
|
||||
]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 27
|
||||
]]></stack-trace>
|
||||
</assertion>
|
||||
</assertions>
|
||||
</test-case>
|
||||
<test-suite type="Theory" id="1-1010" name="Is_Even_Number" fullname="DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" testcasecount="2" result="Failed" site="Child" start-time="2021-06-28T20:23:41.5217837Z" end-time="2021-06-28T20:23:41.5251025Z" duration="0.003318" total="2" passed="1" failed="1" warnings="0" inconclusive="0" skipped="0" asserts="2">
|
||||
<properties>
|
||||
<property name="_JOINTYPE" value="Combinatorial" />
|
||||
</properties>
|
||||
<failure>
|
||||
<message><![CDATA[One or more child tests had errors]]></message>
|
||||
</failure>
|
||||
<test-case id="1-1008" name="Is_Even_Number(2)" fullname="DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(2)" methodname="Is_Even_Number" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="2002556739" result="Passed" start-time="2021-06-28T20:23:41.5222381Z" end-time="2021-06-28T20:23:41.5228607Z" duration="0.000622" asserts="1" />
|
||||
<test-case id="1-1009" name="Is_Even_Number(3)" fullname="DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(3)" methodname="Is_Even_Number" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="1722214143" result="Failed" start-time="2021-06-28T20:23:41.5228803Z" end-time="2021-06-28T20:23:41.5239781Z" duration="0.001098" asserts="1">
|
||||
<failure>
|
||||
<message><![CDATA[ Expected: True
|
||||
But was: False
|
||||
]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 61
|
||||
]]></stack-trace>
|
||||
</failure>
|
||||
<assertions>
|
||||
<assertion result="Failed">
|
||||
<message><![CDATA[ Expected: True
|
||||
But was: False
|
||||
]]></message>
|
||||
<stack-trace><![CDATA[ at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.NUnitV3Tests\CalculatorTests.cs:line 61
|
||||
]]></stack-trace>
|
||||
</assertion>
|
||||
</assertions>
|
||||
</test-case>
|
||||
</test-suite>
|
||||
<test-case id="1-1001" name="Passing_Test" fullname="DotnetTests.XUnitTests.CalculatorTests.Passing_Test" methodname="Passing_Test" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="550330290" result="Passed" start-time="2021-06-28T20:23:41.5260365Z" end-time="2021-06-28T20:23:41.5262756Z" duration="0.000238" asserts="1" />
|
||||
<test-case id="1-1002" name="Passing_Test_With_Description" fullname="DotnetTests.XUnitTests.CalculatorTests.Passing_Test_With_Description" methodname="Passing_Test_With_Description" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="1693317298" result="Passed" start-time="2021-06-28T20:23:41.5263998Z" end-time="2021-06-28T20:23:41.5265354Z" duration="0.000135" asserts="1">
|
||||
<properties>
|
||||
<property name="Description" value="Some description" />
|
||||
</properties>
|
||||
</test-case>
|
||||
<test-case id="1-1007" name="Skipped_Test" fullname="DotnetTests.XUnitTests.CalculatorTests.Skipped_Test" methodname="Skipped_Test" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Ignored" seed="1512653931" result="Skipped" label="Ignored" start-time="2021-06-28T20:23:41.5265550Z" end-time="2021-06-28T20:23:41.5269525Z" duration="0.000398" asserts="0">
|
||||
<properties>
|
||||
<property name="_SKIPREASON" value="Skipped" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[Skipped]]></message>
|
||||
</reason>
|
||||
</test-case>
|
||||
<test-case id="1-1006" name="Timeout_Test" fullname="DotnetTests.XUnitTests.CalculatorTests.Timeout_Test" methodname="Timeout_Test" classname="DotnetTests.XUnitTests.CalculatorTests" runstate="Runnable" seed="258810529" result="Failed" label="Test exceeded Timeout value 1ms." start-time="2021-06-28T20:23:41.5269651Z" end-time="2021-06-28T20:23:41.5419118Z" duration="0.014949" asserts="0">
|
||||
<properties>
|
||||
<property name="Timeout" value="1" />
|
||||
</properties>
|
||||
<failure />
|
||||
</test-case>
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
</test-run>
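The fixture above covers every coarse outcome the reporter distinguishes: Passed, Failed (including the error and timeout labels) and Skipped/Ignored. As a rough orientation only, the expected mapping looks like the sketch below; the values 'success', 'failed' and 'skipped' come from the snapshots earlier in this diff, the handling of other NUnit outcomes such as Inconclusive is an assumption, and the real logic lives in DotnetNunitParser, which is not shown here.

// Rough outcome-mapping sketch; not the parser's actual implementation.
type Outcome = 'success' | 'failed' | 'skipped'

function mapNunitResult(result: string): Outcome {
  switch (result) {
    case 'Passed':
      return 'success'
    case 'Failed':
      return 'failed'
    case 'Skipped':
      return 'skipped'
    default:
      // e.g. 'Inconclusive', treated as skipped here purely as an assumption
      return 'skipped'
  }
}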
|
||||
60
__tests__/fixtures/dotnet-xunitv3.trx
Normal file
@@ -0,0 +1,60 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<TestRun id="54e29175-539e-48a3-a634-3a1855a0ed38" name="@Asterix 2025-06-22 14:17:12.022" xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010">
|
||||
<Times creation="2025-06-22T14:17:11.756535Z" queuing="2025-06-22T14:17:11.756535Z" start="2025-06-22T14:17:11.756535Z" finish="2025-06-22T14:17:12.023063Z" />
|
||||
<TestSettings name="default" id="932e6c6f-3e5b-4392-ad65-e04c1ef476b5">
|
||||
<Deployment runDeploymentRoot="_Asterix_2025-06-22_14_17_12.022" />
|
||||
</TestSettings>
|
||||
<Results>
|
||||
<UnitTestResult executionId="37242a1f-ca3e-44b3-8142-71e510480975" testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" testName="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" computerName="Asterix" duration="00:00:00.0170545" startTime="2025-06-22T14:17:11.9339840+00:00" endTime="2025-06-22T14:17:11.9750850+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="37242a1f-ca3e-44b3-8142-71e510480975">
|
||||
<Output>
|
||||
<ErrorInfo>
|
||||
<Message>Assert.Null() Failure: Value is not null
|
||||
Expected: null
|
||||
Actual: Fixture { }</Message>
|
||||
<StackTrace> at DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test() in /_/reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs:line 25
|
||||
at System.RuntimeMethodHandle.InvokeMethod(Object target, Void** arguments, Signature sig, Boolean isConstructor)
|
||||
at System.Reflection.MethodBaseInvoker.InvokeWithNoArgs(Object obj, BindingFlags invokeAttr)</StackTrace>
|
||||
</ErrorInfo>
|
||||
</Output>
|
||||
</UnitTestResult>
|
||||
<UnitTestResult executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testId="3ee930dd-8a75-92a0-0d90-373833166db1" testName="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" computerName="Asterix" duration="00:00:00.0008786" startTime="2025-06-22T14:17:11.9819890+00:00" endTime="2025-06-22T14:17:11.9833560+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Passed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
|
||||
<UnitTestResult executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testId="372fb60f-1f5b-a52e-032e-41a7556021e8" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320280+00:00" endTime="2025-06-22T14:17:12.0320290+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
|
||||
<UnitTestResult executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" testName="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" computerName="Asterix" duration="00:00:00" startTime="2025-06-22T14:17:12.0320420+00:00" endTime="2025-06-22T14:17:12.0320430+00:00" testType="13CDC9D9-DDB5-4fa4-A97D-D965CCFC6D4B" outcome="Failed" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" relativeResultsDirectory="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
|
||||
</Results>
|
||||
<TestDefinitions>
|
||||
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="f846a1e6-0b68-2ac6-9a66-f417926e3238">
|
||||
<Execution id="37242a1f-ca3e-44b3-8142-71e510480975" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test" />
|
||||
</UnitTest>
|
||||
<UnitTest name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="3ee930dd-8a75-92a0-0d90-373833166db1">
|
||||
<Execution id="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" className="DotnetTests.XUnitV3Tests.FixtureTests" name="DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test" />
|
||||
</UnitTest>
|
||||
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="372fb60f-1f5b-a52e-032e-41a7556021e8">
|
||||
<Execution id="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Passing_Test)]" />
|
||||
</UnitTest>
|
||||
<UnitTest name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" storage="~/test-reporter/reports/dotnet/dotnettests.xunitv3tests/bin/debug/net8.0/dotnettests.xunitv3tests.dll" id="a69083a1-56b4-3da3-2d7c-66fda374fd8e">
|
||||
<Execution id="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" />
|
||||
<TestMethod codeBase="~/test-reporter/reports/dotnet/DotnetTests.XUnitV3Tests/bin/Debug/net8.0/DotnetTests.XUnitV3Tests.dll" adapterTypeName="executor://30ea7c6e-dd24-4152-a360-1387158cd41d/2.0.3" name="[Test Class Cleanup Failure (DotnetTests.XUnitV3Tests.FixtureTests.Failing_Test)]" />
|
||||
</UnitTest>
|
||||
</TestDefinitions>
|
||||
<TestEntries>
|
||||
<TestEntry testId="f846a1e6-0b68-2ac6-9a66-f417926e3238" executionId="37242a1f-ca3e-44b3-8142-71e510480975" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="3ee930dd-8a75-92a0-0d90-373833166db1" executionId="592aaafb-4dc0-49dc-b3c7-bcd81218d58a" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="372fb60f-1f5b-a52e-032e-41a7556021e8" executionId="19c42d36-f4d7-4046-bcc6-dd9b85c9ca2b" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestEntry testId="a69083a1-56b4-3da3-2d7c-66fda374fd8e" executionId="b7f40170-1e2c-45ce-b5e4-5bf49fd4c360" testListId="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
</TestEntries>
|
||||
<TestLists>
|
||||
<TestList name="Results Not in a List" id="8C84FA94-04C1-424b-9868-57A2D4851A1D" />
|
||||
<TestList name="All Loaded Results" id="19431567-8539-422a-85d7-44ee4e166bda" />
|
||||
</TestLists>
|
||||
<ResultSummary outcome="Failed">
|
||||
<Counters total="4" executed="4" passed="1" failed="3" error="0" timeout="0" aborted="0" inconclusive="0" passedButRunAborted="0" notRunnable="0" notExecuted="0" disconnected="0" warning="0" completed="0" inProgress="0" pending="0" />
|
||||
<RunInfos>
|
||||
<RunInfo computerName="Asterix" outcome="Error" timestamp="2025-06-22T14:17:12.033401">
|
||||
<Text>Exit code indicates failure: '2'. Please refer to https://aka.ms/testingplatform/exitcodes for more information.</Text>
|
||||
</RunInfo>
|
||||
</RunInfos>
|
||||
</ResultSummary>
|
||||
</TestRun>
|
||||
22
__tests__/fixtures/empty/dotnet-trx-empty-test-definitions.trx
Normal file
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<TestRun id="80e4c095-f726-4ab2-9441-416daa162672" name="..." runUser="..." xmlns="http://microsoft.com/schemas/VisualStudio/TeamTest/2010">
|
||||
<Times creation="2021-02-26T10:36:33.7131022+02:00" queuing="2021-02-26T10:36:33.7131029+02:00" start="2021-02-26T10:36:33.3278956+02:00" finish="2021-02-26T10:36:33.7139830+02:00" />
|
||||
<TestSettings name="default" id="863a1d8b-ee3b-45f9-86ee-1869bc4e889f">
|
||||
<Deployment runDeploymentRoot="..." />
|
||||
</TestSettings>
|
||||
<Results />
|
||||
<TestDefinitions />
|
||||
<TestEntries />
|
||||
<TestLists>
|
||||
<TestList name="Results Not in a List" id="8c84fa94-04c1-424b-9868-57a2d4851a1d" />
|
||||
<TestList name="All Loaded Results" id="19431567-8539-422a-85d7-44ee4e166bda" />
|
||||
</TestLists>
|
||||
<ResultSummary outcome="Completed">
|
||||
<Counters total="0" executed="0" passed="0" failed="0" error="0" timeout="0" aborted="0" inconclusive="0" passedButRunAborted="0" notRunnable="0" notExecuted="0" disconnected="0" warning="0" completed="0" inProgress="0" pending="0" />
|
||||
<RunInfos>
|
||||
<RunInfo computerName="..." outcome="Warning" timestamp="2021-02-26T10:36:33.6676104+02:00">
|
||||
<Text>No test is available in (...). Make sure that test discoverer & executors are registered and platform & framework version settings are appropriate and try again.</Text>
|
||||
</RunInfo>
|
||||
</RunInfos>
|
||||
</ResultSummary>
|
||||
</TestRun>
|
||||
2
__tests__/fixtures/empty/phpunit-empty.xml
Normal file
@@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites/>
|
||||
31
__tests__/fixtures/external/java/junit4-basic.xml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a basic JUnit-style XML example to highlight the basic structure.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
<testsuites time="15.682687">
|
||||
<testsuite name="Tests.Registration" time="6.605871">
|
||||
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
|
||||
</testsuite>
|
||||
<testsuite name="Tests.Authentication" time="9.076816">
|
||||
<!-- Java JUnit4 XML files do not nest <testsuite> elements -->
|
||||
<!--
|
||||
<testsuite name="Tests.Authentication.Login" time="4.356">
|
||||
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
|
||||
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
|
||||
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
|
||||
</testsuite>
|
||||
-->
|
||||
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
|
||||
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
|
||||
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
|
||||
<failure message="Assertion error message" type="AssertionError">
|
||||
<!-- Call stack printed here -->
|
||||
</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
141
__tests__/fixtures/external/java/junit4-complete.xml
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
This is a JUnit-style XML example with commonly used tags and attributes.
|
||||
|
||||
Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
|
||||
Testmo test management software - https://www.testmo.com/
|
||||
-->
|
||||
|
||||
<!-- <testsuites> Usually the root element of a JUnit XML file. Some tools leave out
|
||||
the <testsuites> element if there is only a single top-level <testsuite> element (which
|
||||
is then used as the root element).
|
||||
|
||||
name Name of the entire test run
|
||||
tests Total number of tests in this file
|
||||
failures Total number of failed tests in this file
|
||||
errors Total number of errored tests in this file
|
||||
skipped Total number of skipped tests in this file
|
||||
assertions Total number of assertions for all tests in this file
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test run was executed (in ISO 8601 format)
|
||||
-->
|
||||
<testsuites name="Test run" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23">
|
||||
|
||||
<!-- <testsuite> A test suite usually represents a class, folder or group of tests.
|
||||
There can be many test suites in an XML file, and there can be test suites under other
|
||||
test suites.
|
||||
|
||||
name Name of the test suite (e.g. class name or folder name)
|
||||
tests Total number of tests in this suite
|
||||
failures Total number of failed tests in this suite
|
||||
errors Total number of errored tests in this suite
|
||||
skipped Total number of skipped tests in this suite
|
||||
assertions Total number of assertions for all tests in this suite
|
||||
time Aggregated time of all tests in this file in seconds
|
||||
timestamp Date and time of when the test suite was executed (in ISO 8601 format)
|
||||
file Source code file of this test suite
|
||||
-->
|
||||
<testsuite name="Tests.Registration" tests="8" failures="1" errors="1" skipped="1"
|
||||
assertions="20" time="16.082687" timestamp="2021-04-02T15:48:23"
|
||||
file="tests/registration.code">
|
||||
|
||||
<!-- <properties> Test suites (and test cases, see below) can have additional
|
||||
properties such as environment variables or version numbers. -->
|
||||
<properties>
|
||||
<!-- <property> Each property has a name and value. Some tools also support
|
||||
properties with text values instead of value attributes. -->
|
||||
<property name="version" value="1.774" />
|
||||
<property name="commit" value="ef7bebf" />
|
||||
<property name="browser" value="Google Chrome" />
|
||||
<property name="ci" value="https://github.com/actions/runs/1234" />
|
||||
<property name="config">
|
||||
Config line #1
|
||||
Config line #2
|
||||
Config line #3
|
||||
</property>
|
||||
</properties>
|
||||
|
||||
<!-- <system-out> Optionally data written to standard out for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optionally data written to standard error for the suite.
|
||||
Also supported on a test case level, see below. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
|
||||
<!-- <testcase> There are one or more test cases in a test suite. A test passed
|
||||
if there isn't an additional result element (skipped, failure, error).
|
||||
|
||||
name The name of this test case, often the method name
|
||||
classname The name of the parent class/folder, often the same as the suite's name
|
||||
assertions Number of assertions checked during test case execution
|
||||
time Execution time of the test in seconds
|
||||
file Source code file of this test case
|
||||
line Source code line number of the start of this test case
|
||||
-->
|
||||
<testcase name="testCase1" classname="Tests.Registration" assertions="2"
|
||||
time="2.436" file="tests/registration.code" line="24" />
|
||||
<testcase name="testCase2" classname="Tests.Registration" assertions="6"
|
||||
time="1.534" file="tests/registration.code" line="62" />
|
||||
<testcase name="testCase3" classname="Tests.Registration" assertions="3"
|
||||
time="0.822" file="tests/registration.code" line="102" />
|
||||
|
||||
<!-- Example of a test case that was skipped -->
|
||||
<testcase name="testCase4" classname="Tests.Registration" assertions="0"
|
||||
time="0" file="tests/registration.code" line="164">
|
||||
<!-- <skipped> Indicates that the test was not executed. Can have an optional
|
||||
message describing why the test was skipped. -->
|
||||
<skipped message="Test was skipped." />
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that failed. -->
|
||||
<testcase name="testCase5" classname="Tests.Registration" assertions="2"
|
||||
time="2.902412" file="tests/registration.code" line="202">
|
||||
<!-- <failure> The test failed because one of the assertions/checks failed.
|
||||
Can have a message and failure type, often the assertion type or class. The text
|
||||
content of the element often includes the failure description or stack trace. -->
|
||||
<failure message="Expected value did not match." type="AssertionError">
|
||||
<!-- Failure description or stack trace -->
|
||||
</failure>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case that had errors. -->
|
||||
<testcase name="testCase6" classname="Tests.Registration" assertions="0"
|
||||
time="3.819" file="tests/registration.code" line="235">
|
||||
<!-- <error> The test had an unexpected error during execution. Can have a
|
||||
message and error type, often the exception type or class. The text
|
||||
content of the element often includes the error description or stack trace. -->
|
||||
<error message="Division by zero." type="ArithmeticError">
|
||||
<!-- Error description or stack trace -->
|
||||
</error>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with outputs. -->
|
||||
<testcase name="testCase7" classname="Tests.Registration" assertions="3"
|
||||
time="2.944" file="tests/registration.code" line="287">
|
||||
<!-- <system-out> Optional data written to standard out for the test case. -->
|
||||
<system-out>Data written to standard out.</system-out>
|
||||
|
||||
<!-- <system-err> Optional data written to standard error for the test case. -->
|
||||
<system-err>Data written to standard error.</system-err>
|
||||
</testcase>
|
||||
|
||||
<!-- Example of a test case with properties -->
|
||||
<testcase name="testCase8" classname="Tests.Registration" assertions="4"
|
||||
time="1.625275" file="tests/registration.code" line="302">
|
||||
<!-- <properties> Some tools also support properties for test cases. -->
|
||||
<properties>
|
||||
<property name="priority" value="high" />
|
||||
<property name="language" value="english" />
|
||||
<property name="author" value="Adrian" />
|
||||
<property name="attachment" value="screenshots/dashboard.png" />
|
||||
<property name="attachment" value="screenshots/users.png" />
|
||||
<property name="description">
|
||||
This text describes the purpose of this test case and provides
|
||||
an overview of what the test does and how it works.
|
||||
</property>
|
||||
</properties>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
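The annotated fixture above doubles as a reference for the JUnit attribute semantics the reporter relies on: tests/failures/errors/skipped totals, per-case time, and nested skipped/failure/error elements. A test for it would presumably follow the same shape as the dotnet tests in this change. The sketch below assumes a JUnit parser class named JavaJunitParser under src/parsers/java-junit/, which is not shown in this diff, so treat that class name and import path as assumptions; only ParseOptions, getReport and normalizeFilePath are confirmed above.

import * as fs from 'fs'
import * as path from 'path'
// Assumed parser class and path; not confirmed by this diff.
import {JavaJunitParser} from '../src/parsers/java-junit/java-junit-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'

it('parses the vendored junit4-complete fixture', async () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

  const opts: ParseOptions = {parseErrors: true, trackedFiles: []}
  const parser = new JavaJunitParser(opts)
  const result = await parser.parse(filePath, fileContent)

  // The <testsuites> element declares 8 tests with one failure, one error and one skip,
  // so the run as a whole should not come out as a success.
  expect(result.tests).toBe(8)
  expect(result.result).not.toBe('success')
})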
|
||||
125
__tests__/fixtures/external/nunit-sample.xml
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
<?xml version="1.0" encoding="utf-8" standalone="no"?>
|
||||
<test-run id="2" name="mock-assembly.dll" fullname="D:\Dev\NUnit\nunit-3.0\work\bin\vs2008\Debug\mock-assembly.dll" testcasecount="25" result="Failed" time="0.154" total="18" passed="12" failed="2" inconclusive="1" skipped="3" asserts="2" run-date="2011-07-26" start-time="11:34:27">
|
||||
<environment nunit-version="1.0.0.0" clr-version="2.0.50727.4961" os-version="Microsoft Windows NT 6.1.7600.0" platform="Win32NT" cwd="D:\Dev\NUnit\nunit-3.0\work\bin\vs2008\Debug" machine-name="CHARLIE-LAPTOP" user="charlie" user-domain="charlie-laptop" culture="en-US" uiculture="en-US" />
|
||||
<test-suite type="Assembly" id="1036" name="mock-assembly.dll" fullname="D:\Dev\NUnit\nunit-3.0\work\bin\vs2008\Debug\mock-assembly.dll" testcasecount="25" result="Failed" time="0.154" total="18" passed="12" failed="2" inconclusive="1" skipped="3" asserts="2">
|
||||
<properties>
|
||||
<property name="_PID" value="11928" />
|
||||
<property name="_APPDOMAIN" value="test-domain-mock-assembly.dll" />
|
||||
</properties>
|
||||
<failure>
|
||||
<message><![CDATA[Child test failed]]></message>
|
||||
</failure>
|
||||
<test-suite type="TestFixture" id="1000" name="MockTestFixture" fullname="NUnit.Tests.Assemblies.MockTestFixture" testcasecount="11" result="Failed" time="0.119" total="10" passed="4" failed="2" inconclusive="1" skipped="3" asserts="0">
|
||||
<properties>
|
||||
<property name="Category" value="FixtureCategory" />
|
||||
<property name="Description" value="Fake Test Fixture" />
|
||||
</properties>
|
||||
<failure>
|
||||
<message><![CDATA[Child test failed]]></message>
|
||||
</failure>
|
||||
<test-case id="1005" name="FailingTest" fullname="NUnit.Tests.Assemblies.MockTestFixture.FailingTest" result="Failed" time="0.023" asserts="0">
|
||||
<failure>
|
||||
<message><![CDATA[Intentional failure]]></message>
|
||||
<stack-trace><![CDATA[ at NUnit.Framework.Assert.Fail(String message, Object[] args) in D:\Dev\NUnit\nunit-3.0\work\NUnitFramework\src\framework\Assert.cs:line 142
|
||||
at NUnit.Framework.Assert.Fail(String message) in D:\Dev\NUnit\nunit-3.0\work\NUnitFramework\src\framework\Assert.cs:line 152
|
||||
at NUnit.Tests.Assemblies.MockTestFixture.FailingTest() in D:\Dev\NUnit\nunit-3.0\work\NUnitFramework\src\mock-assembly\MockAssembly.cs:line 121]]></stack-trace>
|
||||
</failure>
|
||||
</test-case>
|
||||
<test-case id="1010" name="InconclusiveTest" fullname="NUnit.Tests.Assemblies.MockTestFixture.InconclusiveTest" result="Inconclusive" time="0.001" asserts="0" />
|
||||
<test-case id="1001" name="MockTest1" fullname="NUnit.Tests.Assemblies.MockTestFixture.MockTest1" result="Passed" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="Description" value="Mock Test #1" />
|
||||
</properties>
|
||||
</test-case>
|
||||
<test-case id="1002" name="MockTest2" fullname="NUnit.Tests.Assemblies.MockTestFixture.MockTest2" result="Passed" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="Severity" value="Critical" />
|
||||
<property name="Description" value="This is a really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really, really long description" />
|
||||
<property name="Category" value="MockCategory" />
|
||||
</properties>
|
||||
</test-case>
|
||||
<test-case id="1003" name="MockTest3" fullname="NUnit.Tests.Assemblies.MockTestFixture.MockTest3" result="Passed" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="Category" value="AnotherCategory" />
|
||||
<property name="Category" value="MockCategory" />
|
||||
</properties>
|
||||
</test-case>
|
||||
<test-case id="1007" name="MockTest4" fullname="NUnit.Tests.Assemblies.MockTestFixture.MockTest4" result="Skipped" label="Ignored" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="Category" value="Foo" />
|
||||
<property name="_SKIPREASON" value="ignoring this test method for now" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[ignoring this test method for now]]></message>
|
||||
</reason>
|
||||
</test-case>
|
||||
<test-case id="1004" name="MockTest5" fullname="NUnit.Tests.Assemblies.MockTestFixture.MockTest5" result="Skipped" label="Invalid" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="_SKIPREASON" value="Method is not public" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[Method is not public]]></message>
|
||||
</reason>
|
||||
</test-case>
|
||||
<test-case id="1009" name="NotRunnableTest" fullname="NUnit.Tests.Assemblies.MockTestFixture.NotRunnableTest" result="Skipped" label="Invalid" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="_SKIPREASON" value="No arguments were provided" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[No arguments were provided]]></message>
|
||||
</reason>
|
||||
</test-case>
|
||||
<test-case id="1011" name="TestWithException" fullname="NUnit.Tests.Assemblies.MockTestFixture.TestWithException" result="Failed" label="Error" time="0.002" asserts="0">
|
||||
<failure>
|
||||
<message><![CDATA[System.ApplicationException : Intentional Exception]]></message>
|
||||
<stack-trace><![CDATA[ at NUnit.Tests.Assemblies.MockTestFixture.MethodThrowsException() in D:\Dev\NUnit\nunit-3.0\work\NUnitFramework\src\mock-assembly\MockAssembly.cs:line 158
|
||||
at NUnit.Tests.Assemblies.MockTestFixture.TestWithException() in D:\Dev\NUnit\nunit-3.0\work\NUnitFramework\src\mock-assembly\MockAssembly.cs:line 153]]></stack-trace>
|
||||
</failure>
|
||||
</test-case>
|
||||
<test-case id="1006" name="TestWithManyProperties" fullname="NUnit.Tests.Assemblies.MockTestFixture.TestWithManyProperties" result="Passed" time="0.000" asserts="0">
|
||||
<properties>
|
||||
<property name="TargetMethod" value="SomeClassName" />
|
||||
<property name="Size" value="5" />
|
||||
</properties>
|
||||
</test-case>
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1023" name="BadFixture" fullname="NUnit.Tests.BadFixture" testcasecount="1" result="Skipped" label="Invalid" time="0.000" total="0" passed="0" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<properties>
|
||||
<property name="_SKIPREASON" value="No suitable constructor was found" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[No suitable constructor was found]]></message>
|
||||
</reason>
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1025" name="FixtureWithTestCases" fullname="NUnit.Tests.FixtureWithTestCases" testcasecount="2" result="Passed" time="0.010" total="2" passed="2" failed="0" inconclusive="0" skipped="0" asserts="2">
|
||||
<test-suite type="ParameterizedMethod" id="1026" name="MethodWithParameters" fullname="NUnit.Tests.FixtureWithTestCases.MethodWithParameters" testcasecount="2" result="Passed" time="0.009" total="2" passed="2" failed="0" inconclusive="0" skipped="0" asserts="2">
|
||||
<test-case id="1027" name="MethodWithParameters(2,2)" fullname="NUnit.Tests.FixtureWithTestCases.MethodWithParameters(2,2)" result="Passed" time="0.006" asserts="1" />
|
||||
<test-case id="1028" name="MethodWithParameters(9,11)" fullname="NUnit.Tests.FixtureWithTestCases.MethodWithParameters(9,11)" result="Passed" time="0.000" asserts="1" />
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1016" name="IgnoredFixture" fullname="NUnit.Tests.IgnoredFixture" testcasecount="3" result="Skipped" label="Ignored" time="0.000" total="0" passed="0" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<properties>
|
||||
<property name="_SKIPREASON" value="" />
|
||||
</properties>
|
||||
<reason>
|
||||
<message><![CDATA[]]></message>
|
||||
</reason>
|
||||
</test-suite>
|
||||
<test-suite type="ParameterizedFixture" id="1029" name="ParameterizedFixture" fullname="NUnit.Tests.ParameterizedFixture" testcasecount="4" result="Passed" time="0.007" total="4" passed="4" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<test-suite type="TestFixture" id="1030" name="ParameterizedFixture(42)" fullname="NUnit.Tests.ParameterizedFixture(42)" testcasecount="2" result="Passed" time="0.003" total="2" passed="2" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<test-case id="1031" name="Test1" fullname="NUnit.Tests.ParameterizedFixture(42).Test1" result="Passed" time="0.000" asserts="0" />
|
||||
<test-case id="1032" name="Test2" fullname="NUnit.Tests.ParameterizedFixture(42).Test2" result="Passed" time="0.000" asserts="0" />
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1033" name="ParameterizedFixture(5)" fullname="NUnit.Tests.ParameterizedFixture(5)" testcasecount="2" result="Passed" time="0.002" total="2" passed="2" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<test-case id="1034" name="Test1" fullname="NUnit.Tests.ParameterizedFixture(5).Test1" result="Passed" time="0.000" asserts="0" />
|
||||
<test-case id="1035" name="Test2" fullname="NUnit.Tests.ParameterizedFixture(5).Test2" result="Passed" time="0.000" asserts="0" />
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1012" name="OneTestCase" fullname="NUnit.Tests.Singletons.OneTestCase" testcasecount="1" result="Passed" time="0.001" total="1" passed="1" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<test-case id="1013" name="TestCase" fullname="NUnit.Tests.Singletons.OneTestCase.TestCase" result="Passed" time="0.000" asserts="0" />
|
||||
</test-suite>
|
||||
<test-suite type="TestFixture" id="1014" name="MockTestFixture" fullname="NUnit.Tests.TestAssembly.MockTestFixture" testcasecount="1" result="Passed" time="0.001" total="1" passed="1" failed="0" inconclusive="0" skipped="0" asserts="0">
|
||||
<test-case id="1015" name="MyTest" fullname="NUnit.Tests.TestAssembly.MockTestFixture.MyTest" result="Passed" time="0.001" asserts="0" />
|
||||
</test-suite>
|
||||
</test-suite>
|
||||
</test-run>
|
||||
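The NUnit 3 report above separates the result attribute (Passed, Failed, Skipped) from an optional label (Error, Invalid, Ignored) that refines it. The sketch below shows one way such a pair could collapse into a single reported status; the status names and the treatment of Inconclusive are assumptions for illustration, not this action's actual mapping.

```ts
// Minimal sketch, assuming a simplified status vocabulary; not this action's real mapping.
type ReportedStatus = 'passed' | 'failed' | 'skipped'

// NUnit 3 emits result="Passed|Failed|Skipped|Inconclusive" plus an optional
// label such as "Error" (unexpected exception), "Invalid" or "Ignored".
function nunitStatus(result: string): ReportedStatus {
  switch (result) {
    case 'Passed':
      return 'passed'
    case 'Failed':
      // covers label="Error" cases such as TestWithException above
      return 'failed'
    case 'Skipped':
    case 'Inconclusive':
      // treating Inconclusive as skipped is an assumption made for this sketch;
      // covers label="Invalid" (NotRunnableTest, BadFixture) and label="Ignored" (IgnoredFixture)
      return 'skipped'
    default:
      return 'failed'
  }
}

// nunitStatus('Skipped') -> 'skipped'
// nunitStatus('Failed')  -> 'failed'
```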
28
__tests__/fixtures/external/phpunit/junit-basic.xml
vendored
Normal file
@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is a basic JUnit-style XML example to highlight the basic structure.

Example by Testmo. Copyright 2023 Testmo GmbH. All rights reserved.
Testmo test management software - https://www.testmo.com/
-->
<testsuites time="15.682687">
<testsuite name="Tests.Registration" time="6.605871">
<testcase name="testCase1" classname="Tests.Registration" time="2.113871" />
<testcase name="testCase2" classname="Tests.Registration" time="1.051" />
<testcase name="testCase3" classname="Tests.Registration" time="3.441" />
</testsuite>
<testsuite name="Tests.Authentication" time="9.076816">
<testsuite name="Tests.Authentication.Login" time="4.356">
<testcase name="testCase4" classname="Tests.Authentication.Login" time="2.244" />
<testcase name="testCase5" classname="Tests.Authentication.Login" time="0.781" />
<testcase name="testCase6" classname="Tests.Authentication.Login" time="1.331" />
</testsuite>
<testcase name="testCase7" classname="Tests.Authentication" time="2.508" />
<testcase name="testCase8" classname="Tests.Authentication" time="1.230816" />
<testcase name="testCase9" classname="Tests.Authentication" time="0.982">
<failure message="Assertion error message" type="AssertionError">
<!-- Call stack printed here -->
</failure>
</testcase>
</testsuite>
</testsuites>
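The junit-basic.xml fixture above nests a <testsuite> inside another <testsuite>, so a consumer has to walk the tree rather than assume a flat list. A minimal sketch of reading that shape, assuming the @xmldom/xmldom package and a hypothetical summarizeJunit helper (illustrative only, not the action's actual parser):

```ts
// Minimal sketch, assuming the @xmldom/xmldom package; not the action's actual JUnit parser.
import {DOMParser} from '@xmldom/xmldom'
import * as fs from 'fs'

interface SuiteSummary {
  name: string
  tests: number
  time: number
}

function summarizeJunit(xml: string): SuiteSummary[] {
  const doc = new DOMParser().parseFromString(xml, 'text/xml')
  const suites = doc.getElementsByTagName('testsuite')
  const summaries: SuiteSummary[] = []
  for (let i = 0; i < suites.length; i++) {
    const suite = suites.item(i)
    if (!suite) continue
    summaries.push({
      name: suite.getAttribute('name') ?? '(unnamed)',
      // getElementsByTagName is recursive, so a parent suite also counts the
      // test cases of suites nested inside it (e.g. Tests.Authentication.Login)
      tests: suite.getElementsByTagName('testcase').length,
      time: parseFloat(suite.getAttribute('time') ?? '0')
    })
  }
  return summaries
}

const xml = fs.readFileSync('__tests__/fixtures/external/phpunit/junit-basic.xml', 'utf8')
console.log(summarizeJunit(xml))
// Tests.Registration -> 3 cases, Tests.Authentication -> 6 (3 own + 3 nested), Tests.Authentication.Login -> 3
```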
212
__tests__/fixtures/external/phpunit/phpcheckstyle-phpunit.xml
vendored
Normal file
@@ -0,0 +1,212 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="/workspace/phpcheckstyle/phpunit.xml" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="PHPUnitTestSuite" tests="30" assertions="117" errors="0" failures="2" skipped="0" time="0.041478">
|
||||
<testsuite name="CommentsTest" file="/workspace/phpcheckstyle/test/CommentsTest.php" tests="3" assertions="12" errors="0" failures="0" skipped="0" time="0.006702">
|
||||
<testcase name="testGoodDoc" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="12" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.005093"/>
|
||||
<testcase name="testComments" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="30" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000921">
|
||||
<system-out>File "./test/sample/bad_comments.php" warning, line 4 - Avoid Shell/Perl like comments.
|
||||
File "./test/sample/bad_comments.php" warning, line 6 - The class Comments must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 10 - The function testComment must have a docblock comment.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment returns a value and must include @returns in its docblock.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment parameters must match those in its docblock @param.
|
||||
File "./test/sample/bad_comments.php" warning, line 18 - The function testComment throws an exception and must include @throws in its docblock.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testTODOs" file="/workspace/phpcheckstyle/test/CommentsTest.php" line="48" class="CommentsTest" classname="CommentsTest" assertions="4" time="0.000688">
|
||||
<system-out>File "./test/sample/todo.php" warning, line 3 - TODO: The todo message.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="DeprecationTest" file="/workspace/phpcheckstyle/test/DeprecationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000974">
|
||||
<testcase name="testDeprecations" file="/workspace/phpcheckstyle/test/DeprecationTest.php" line="12" class="DeprecationTest" classname="DeprecationTest" assertions="4" time="0.000974">
|
||||
<system-out>File "./test/sample/bad_deprecation.php" warning, line 17 - split is deprecated since PHP 5.3. explode($pattern, $string) or preg_split('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 19 - ereg is deprecated since PHP 5.3. preg_match('@'.$pattern.'@', $string) must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 21 - session_register is deprecated since PHP 5.3. $_SESSION must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 23 - mysql_db_query is deprecated since PHP 5.3. mysql_select_db and mysql_query must be used instead.
|
||||
File "./test/sample/bad_deprecation.php" warning, line 25 - $HTTP_GET_VARS is deprecated since PHP 5.3. $_GET must be used instead.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="GoodTest" file="/workspace/phpcheckstyle/test/GoodTest.php" tests="4" assertions="16" errors="0" failures="0" skipped="0" time="0.005357">
|
||||
<testcase name="testGood" file="/workspace/phpcheckstyle/test/GoodTest.php" line="12" class="GoodTest" classname="GoodTest" assertions="4" time="0.002647"/>
|
||||
<testcase name="testDoWhile" file="/workspace/phpcheckstyle/test/GoodTest.php" line="32" class="GoodTest" classname="GoodTest" assertions="4" time="0.001022"/>
|
||||
<testcase name="testAnonymousFunction" file="/workspace/phpcheckstyle/test/GoodTest.php" line="50" class="GoodTest" classname="GoodTest" assertions="4" time="0.000800"/>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/GoodTest.php" line="68" class="GoodTest" classname="GoodTest" assertions="4" time="0.000888"/>
|
||||
</testsuite>
|
||||
<testsuite name="IndentationTest" file="/workspace/phpcheckstyle/test/IndentationTest.php" tests="8" assertions="32" errors="0" failures="0" skipped="0" time="0.007654">
|
||||
<testcase name="testTabIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="12" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000857">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 8 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - Whitespace indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - Whitespace indentation must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentation" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="30" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000929">
|
||||
<system-out>File "./test/sample/bad_indentation.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 15 - The indentation level must be 8 but was 4.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation.php" warning, line 16 - The indentation level must be 8 but was 1.
|
||||
File "./test/sample/bad_indentation.php" warning, line 17 - The indentation level must be 8 but was 3.
|
||||
File "./test/sample/bad_indentation.php" warning, line 18 - The indentation level must be 8 but was 5.
|
||||
File "./test/sample/bad_indentation.php" warning, line 19 - The indentation level must be 8 but was 6.
|
||||
File "./test/sample/bad_indentation.php" warning, line 20 - The indentation level must be 4 but was 1.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="51" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000975">
|
||||
<system-out>File "./test/sample/bad_indentation_array.php" warning, line 10 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 10 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - Tab indentation must not be used.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 13 - The indentation level must be 4 but was 1.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 16 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 24 - The indentation level must be 12 but was 8.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 29 - The indentation level must be 8 but was 12.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 15 - Undeclared or unused variable: $aVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 19 - Undeclared or unused variable: $bVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 23 - Undeclared or unused variable: $cVar.
|
||||
File "./test/sample/bad_indentation_array.php" warning, line 27 - Undeclared or unused variable: $dVar.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testGoodSpaceIndentationArray" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="72" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001212"/>
|
||||
<testcase name="testGoodIndentationNewLine" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="93" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000859"/>
|
||||
<testcase name="testGoodIndentationSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="116" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000780"/>
|
||||
<testcase name="testBadSpaces" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="137" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.001120">
|
||||
<system-out>File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must follow ,.
|
||||
File "./test/sample/bad_spaces.php" warning, line 17 - Whitespace must precede {.
|
||||
File "./test/sample/bad_spaces.php" warning, line 19 - Whitespace must follow if.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow =.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must precede +.
|
||||
File "./test/sample/bad_spaces.php" warning, line 23 - Whitespace must follow +.
|
||||
File "./test/sample/bad_spaces.php" info, line 25 - Whitespace must not precede ,.
|
||||
File "./test/sample/bad_spaces.php" info, line 26 - Whitespace must not follow !.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testBadSpaceAfterControl" file="/workspace/phpcheckstyle/test/IndentationTest.php" line="155" class="IndentationTest" classname="IndentationTest" assertions="4" time="0.000922">
|
||||
<system-out>File "./test/sample/bad_space_after_control.php" warning, line 19 - Whitespace must not follow if.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="MetricsTest" file="/workspace/phpcheckstyle/test/MetricsTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.004147">
|
||||
<testcase name="testMetrics" file="/workspace/phpcheckstyle/test/MetricsTest.php" line="12" class="MetricsTest" classname="MetricsTest" assertions="4" time="0.004147">
|
||||
<system-out>File "./test/sample/bad_metrics.php" warning, line 21 - The function testMetrics's number of parameters (6) must not exceed 4.
|
||||
File "./test/sample/bad_metrics.php" info, line 55 - Line is too long. [233/160]
|
||||
File "./test/sample/bad_metrics.php" warning, line 21 - The Cyclomatic Complexity of function testMetrics is too high. [15/10]
|
||||
File "./test/sample/bad_metrics.php" warning, line 244 - The testMetrics function body length is too long. [223/200]
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="NamingTest" file="/workspace/phpcheckstyle/test/NamingTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.002697">
|
||||
<testcase name="testNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="12" class="NamingTest" classname="NamingTest" assertions="4" time="0.001426">
|
||||
<system-out>File "./test/sample/_bad_naming.php" error, line 11 - Constant _badly_named_constant name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 13 - Constant bad_CONST name should follow the pattern /^[A-Z][A-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 17 - Top level variable $XXX name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 20 - Variable x name length is too short.
|
||||
File "./test/sample/_bad_naming.php" error, line 28 - Class badlynamedclass name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 32 - Member variable $YYY name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 37 - The constructor name must be __construct().
|
||||
File "./test/sample/_bad_naming.php" error, line 44 - Function Badlynamedfunction name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" warning, line 47 - Local variable $ZZZ name should follow the pattern /^[a-z_][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 54 - Protected function Badlynamedfunction2 name should follow the pattern /^[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 61 - Private function badlynamedfunction3 name should follow the pattern /^_[a-z][a-zA-Z0-9]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 70 - Interface _badlynamedinterface name should follow the pattern /^[A-Z][a-zA-Z0-9_]*$/.
|
||||
File "./test/sample/_bad_naming.php" error, line 75 - File _bad_naming.php name should follow the pattern /^[a-zA-Z][a-zA-Z0-9._]*$/.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testFunctionNaming" file="/workspace/phpcheckstyle/test/NamingTest.php" line="32" class="NamingTest" classname="NamingTest" assertions="4" time="0.001271"/>
|
||||
</testsuite>
|
||||
<testsuite name="OptimizationTest" file="/workspace/phpcheckstyle/test/OptimizationTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000994">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/OptimizationTest.php" line="12" class="OptimizationTest" classname="OptimizationTest" assertions="4" time="0.000994">
|
||||
<system-out>File "./test/sample/bad_optimisation.php" warning, line 18 - count function must not be used inside a loop.
|
||||
File "./test/sample/bad_optimisation.php" warning, line 23 - count function must not be used inside a loop.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="OtherTest" file="/workspace/phpcheckstyle/test/OtherTest.php" tests="4" assertions="13" errors="0" failures="2" skipped="0" time="0.007329">
|
||||
<testcase name="testOther" file="/workspace/phpcheckstyle/test/OtherTest.php" line="12" class="OtherTest" classname="OtherTest" assertions="4" time="0.005251">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testOther
|
||||
We expect 20 warnings
|
||||
Failed asserting that 19 matches expected 20.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:24</failure>
|
||||
<system-out>File "./test/sample/bad_other.php" warning, line 17 - All arguments with default values must be at the end of the block or statement.
|
||||
File "./test/sample/bad_other.php" warning, line 21 - Errors must not be silenced when calling a function.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Encapsed variables must not be used inside a string.
|
||||
File "./test/sample/bad_other.php" warning, line 23 - Prefer single-quoted strings when you don't need string interpolation.
|
||||
File "./test/sample/bad_other.php" warning, line 37 - TODO: Show todos
|
||||
File "./test/sample/bad_other.php" warning, line 40 - Avoid empty statements (;;).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Boolean operators (&&) must be used instead of logical operators (AND).
|
||||
File "./test/sample/bad_other.php" warning, line 42 - Empty if block.
|
||||
File "./test/sample/bad_other.php" warning, line 48 - Heredoc syntax must not be used.
|
||||
File "./test/sample/bad_other.php" warning, line 52 - The statement if must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_other.php" warning, line 54 - The statement while must contain its code within a {} block.
|
||||
File "./test/sample/bad_other.php" warning, line 66 - The switch statement must have a default case.
|
||||
File "./test/sample/bad_other.php" warning, line 79 - The default case of a switch statement must be located after all other cases.
|
||||
File "./test/sample/bad_other.php" warning, line 93 - Unary operators (++ or --) must not be used inside a control statement
|
||||
File "./test/sample/bad_other.php" warning, line 95 - Assigments (=) must not be used inside a control statement.
|
||||
File "./test/sample/bad_other.php" warning, line 106 - File ./test/sample/bad_other.php must not have multiple class declarations.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testException" file="/workspace/phpcheckstyle/test/OtherTest.php" line="31" class="OtherTest" classname="OtherTest" assertions="1" time="0.000751">
|
||||
<failure type="PHPUnit\Framework\ExpectationFailedException">OtherTest::testException
|
||||
We expect 1 error
|
||||
Failed asserting that 0 matches expected 1.
|
||||
|
||||
/workspace/phpcheckstyle/test/OtherTest.php:40</failure>
|
||||
</testcase>
|
||||
<testcase name="testEmpty" file="/workspace/phpcheckstyle/test/OtherTest.php" line="50" class="OtherTest" classname="OtherTest" assertions="4" time="0.000427">
|
||||
<system-out>File "./test/sample/empty.php" warning, line 1 - The file ./test/sample/empty.php is empty.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testSwitchCaseNeedBreak" file="/workspace/phpcheckstyle/test/OtherTest.php" line="69" class="OtherTest" classname="OtherTest" assertions="4" time="0.000901">
|
||||
<system-out>File "./test/sample/switch_multi_case.php" warning, line 10 - The case statement must contain a break.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="PHPTagsTest" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001272">
|
||||
<testcase name="testTextAfterClosingTag" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="12" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000641">
|
||||
<system-out>File "./test/sample/bad_php_tags_text_after_end.php" warning, line 9 - A PHP close tag must not be included at the end of the file.
|
||||
</system-out>
|
||||
</testcase>
|
||||
<testcase name="testClosingTagNotNeeded" file="/workspace/phpcheckstyle/test/PHPTagsTest.php" line="30" class="PHPTagsTest" classname="PHPTagsTest" assertions="4" time="0.000631">
|
||||
<system-out>File "./test/sample/bad_php_tags_end_not_needed.php" warning, line 1 - PHP tag should be at the beginning of the line.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="ProhibitedTest" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.000938">
|
||||
<testcase name="testProhibited" file="/workspace/phpcheckstyle/test/ProhibitedTest.php" line="13" class="ProhibitedTest" classname="ProhibitedTest" assertions="4" time="0.000938">
|
||||
<system-out>File "./test/sample/bad_prohibited.php" warning, line 18 - The function exec must not be called.
|
||||
File "./test/sample/bad_prohibited.php" warning, line 20 - Token T_PRINT must not be used.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="StrictCompareTest" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" tests="1" assertions="4" errors="0" failures="0" skipped="0" time="0.001578">
|
||||
<testcase name="testStrictCompare" file="/workspace/phpcheckstyle/test/StrictCompareTest.php" line="12" class="StrictCompareTest" classname="StrictCompareTest" assertions="4" time="0.001578">
|
||||
<system-out>File "./test/sample/bad_strictcompare.php" warning, line 14 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 19 - Consider using a strict comparison operator instead of !=.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 24 - Consider using a strict comparison operator instead of ==.
|
||||
File "./test/sample/bad_strictcompare.php" warning, line 29 - Consider using a strict comparison operator instead of ==.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="UnusedTest" file="/workspace/phpcheckstyle/test/UnusedTest.php" tests="2" assertions="8" errors="0" failures="0" skipped="0" time="0.001835">
|
||||
<testcase name="testGoodUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="13" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000940"/>
|
||||
<testcase name="testBadUnused" file="/workspace/phpcheckstyle/test/UnusedTest.php" line="32" class="UnusedTest" classname="UnusedTest" assertions="4" time="0.000895">
|
||||
<system-out>File "./test/sample/bad_unused.php" warning, line 23 - Function _testUnused has unused code after RETURN.
|
||||
File "./test/sample/bad_unused.php" warning, line 27 - The function _testUnused parameter $b is not used.
|
||||
File "./test/sample/bad_unused.php" warning, line 18 - Unused private function: _testUnused.
|
||||
File "./test/sample/bad_unused.php" warning, line 20 - Undeclared or unused variable: $c.
|
||||
</system-out>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
59
__tests__/fixtures/golang-json.json
Normal file
@@ -0,0 +1,59 @@
|
||||
{"Time":"2025-04-22T08:59:55.364618802-05:00","Action":"start","Package":"_/home/james_t/git/test-reporter/reports/go"}
|
||||
{"Time":"2025-04-22T08:59:55.371779289-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPassing"}
|
||||
{"Time":"2025-04-22T08:59:55.371805677-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPassing","Output":"=== RUN TestPassing\n"}
|
||||
{"Time":"2025-04-22T08:59:55.428201983-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPassing","Output":" calculator_test.go:11: pass!\n"}
|
||||
{"Time":"2025-04-22T08:59:55.428265529-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPassing","Output":"--- PASS: TestPassing (0.06s)\n"}
|
||||
{"Time":"2025-04-22T08:59:55.428285649-05:00","Action":"pass","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPassing","Elapsed":0.06}
|
||||
{"Time":"2025-04-22T08:59:55.428299886-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestFailing"}
|
||||
{"Time":"2025-04-22T08:59:55.428309029-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestFailing","Output":"=== RUN TestFailing\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317425091-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestFailing","Output":" calculator_test.go:19: expected 1+1 = 3, got 2\n"}
|
||||
{"Time":"2025-04-22T08:59:56.31748077-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestFailing","Output":"--- FAIL: TestFailing (0.89s)\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317493452-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestFailing","Elapsed":0.89}
|
||||
{"Time":"2025-04-22T08:59:56.317506107-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideFunction"}
|
||||
{"Time":"2025-04-22T08:59:56.317514487-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideFunction","Output":"=== RUN TestPanicInsideFunction\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317530448-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideFunction","Output":" calculator_test.go:76: caught panic: runtime error: integer divide by zero\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317541866-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideFunction","Output":"--- FAIL: TestPanicInsideFunction (0.00s)\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317552981-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideFunction","Elapsed":0}
|
||||
{"Time":"2025-04-22T08:59:56.317561057-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideTest"}
|
||||
{"Time":"2025-04-22T08:59:56.317568742-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideTest","Output":"=== RUN TestPanicInsideTest\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317584113-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideTest","Output":" calculator_test.go:76: caught panic: bad stuff\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317598524-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideTest","Output":"--- FAIL: TestPanicInsideTest (0.00s)\n"}
|
||||
{"Time":"2025-04-22T08:59:56.317608268-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestPanicInsideTest","Elapsed":0}
|
||||
{"Time":"2025-04-22T08:59:56.317615472-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestSkipped"}
|
||||
{"Time":"2025-04-22T08:59:56.317623959-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestSkipped","Output":"=== RUN TestSkipped\n"}
|
||||
{"Time":"2025-04-22T08:59:57.256475698-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestSkipped","Output":" calculator_test.go:45: skipping test\n"}
|
||||
{"Time":"2025-04-22T08:59:57.256536372-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestSkipped","Output":"--- SKIP: TestSkipped (0.94s)\n"}
|
||||
{"Time":"2025-04-22T08:59:57.256549142-05:00","Action":"skip","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestSkipped","Elapsed":0.94}
|
||||
{"Time":"2025-04-22T08:59:57.256562053-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases"}
|
||||
{"Time":"2025-04-22T08:59:57.256569388-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases","Output":"=== RUN TestCases\n"}
|
||||
{"Time":"2025-04-22T08:59:57.256580104-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_+_2_=_3"}
|
||||
{"Time":"2025-04-22T08:59:57.256587408-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_+_2_=_3","Output":"=== RUN TestCases/1_+_2_=_3\n"}
|
||||
{"Time":"2025-04-22T08:59:57.653005399-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/4_+_7_=_11"}
|
||||
{"Time":"2025-04-22T08:59:57.653036336-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/4_+_7_=_11","Output":"=== RUN TestCases/4_+_7_=_11\n"}
|
||||
{"Time":"2025-04-22T08:59:58.112825221-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/2_+_3_=_4"}
|
||||
{"Time":"2025-04-22T08:59:58.112858016-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/2_+_3_=_4","Output":"=== RUN TestCases/2_+_3_=_4\n"}
|
||||
{"Time":"2025-04-22T08:59:58.201204209-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/2_+_3_=_4","Output":" calculator_test.go:67: expected 2 + 3 = 4, got 5\n"}
|
||||
{"Time":"2025-04-22T08:59:58.201245827-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_/_2_=_1"}
|
||||
{"Time":"2025-04-22T08:59:58.201255566-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_/_2_=_1","Output":"=== RUN TestCases/1_/_2_=_1\n"}
|
||||
{"Time":"2025-04-22T08:59:59.119852965-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_/_2_=_1","Output":" calculator_test.go:67: expected 1 / 2 = 1, got 0\n"}
|
||||
{"Time":"2025-04-22T08:59:59.119877603-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/9_/_3_=_3"}
|
||||
{"Time":"2025-04-22T08:59:59.119879955-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/9_/_3_=_3","Output":"=== RUN TestCases/9_/_3_=_3\n"}
|
||||
{"Time":"2025-04-22T08:59:59.460576385-05:00","Action":"run","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/14_/_7_=_2"}
|
||||
{"Time":"2025-04-22T08:59:59.460607599-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/14_/_7_=_2","Output":"=== RUN TestCases/14_/_7_=_2\n"}
|
||||
{"Time":"2025-04-22T08:59:59.504952672-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases","Output":"--- FAIL: TestCases (2.25s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.504995938-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_+_2_=_3","Output":" --- PASS: TestCases/1_+_2_=_3 (0.40s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505006062-05:00","Action":"pass","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_+_2_=_3","Elapsed":0.4}
|
||||
{"Time":"2025-04-22T08:59:59.505017551-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/4_+_7_=_11","Output":" --- PASS: TestCases/4_+_7_=_11 (0.46s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505026099-05:00","Action":"pass","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/4_+_7_=_11","Elapsed":0.46}
|
||||
{"Time":"2025-04-22T08:59:59.505033963-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/2_+_3_=_4","Output":" --- FAIL: TestCases/2_+_3_=_4 (0.09s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505042238-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/2_+_3_=_4","Elapsed":0.09}
|
||||
{"Time":"2025-04-22T08:59:59.505050917-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_/_2_=_1","Output":" --- FAIL: TestCases/1_/_2_=_1 (0.92s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505059901-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/1_/_2_=_1","Elapsed":0.92}
|
||||
{"Time":"2025-04-22T08:59:59.505068125-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/9_/_3_=_3","Output":" --- PASS: TestCases/9_/_3_=_3 (0.34s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505076976-05:00","Action":"pass","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/9_/_3_=_3","Elapsed":0.34}
|
||||
{"Time":"2025-04-22T08:59:59.5050845-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/14_/_7_=_2","Output":" --- PASS: TestCases/14_/_7_=_2 (0.04s)\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505091554-05:00","Action":"pass","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases/14_/_7_=_2","Elapsed":0.04}
|
||||
{"Time":"2025-04-22T08:59:59.505098998-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Test":"TestCases","Elapsed":2.25}
|
||||
{"Time":"2025-04-22T08:59:59.505107502-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Output":"FAIL\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505552861-05:00","Action":"output","Package":"_/home/james_t/git/test-reporter/reports/go","Output":"FAIL\t_/home/james_t/git/test-reporter/reports/go\t4.141s\n"}
|
||||
{"Time":"2025-04-22T08:59:59.505584529-05:00","Action":"fail","Package":"_/home/james_t/git/test-reporter/reports/go","Elapsed":4.141}
|
||||
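Each line of the golang-json.json fixture above is a standalone JSON event from `go test -json`; a test's outcome is spread across its run, output, and final pass/fail/skip events. A minimal sketch of folding such a stream into per-test results, with hypothetical GoTestEvent/collectGoResults names and no claim to match this action's implementation:

```ts
// Minimal sketch of folding a `go test -json` event stream into per-test results.
// Illustrative only; type and field names below mirror the fixture, not this action's code.
interface GoTestEvent {
  Time?: string
  Action: 'start' | 'run' | 'output' | 'pass' | 'fail' | 'skip' | 'pause' | 'cont'
  Package?: string
  Test?: string
  Output?: string
  Elapsed?: number
}

interface GoTestResult {
  name: string
  status: 'pass' | 'fail' | 'skip'
  elapsed: number
  output: string[]
}

function collectGoResults(report: string): GoTestResult[] {
  const byName = new Map<string, GoTestResult>()
  for (const line of report.split('\n')) {
    if (line.trim() === '') continue
    const ev = JSON.parse(line) as GoTestEvent
    if (ev.Test === undefined) continue // skip package-level events in this sketch
    const entry: GoTestResult =
      byName.get(ev.Test) ?? {name: ev.Test, status: 'fail', elapsed: 0, output: []}
    if (ev.Action === 'output' && ev.Output !== undefined) entry.output.push(ev.Output)
    if (ev.Action === 'pass' || ev.Action === 'fail' || ev.Action === 'skip') {
      entry.status = ev.Action
      entry.elapsed = ev.Elapsed ?? 0
    }
    byName.set(ev.Test, entry)
  }
  return Array.from(byName.values())
}

// Subtests arrive as separate entries, e.g. "TestCases/2_+_3_=_4" ends up as a failed result.
```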
6
__tests__/fixtures/jest-junit-eslint.xml
Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite package="org.eslint" time="0" tests="1" errors="0" name="test.jsx">
<testcase time="0" name="test.jsx" classname="test" />
</testsuite>
</testsuites>
10
__tests__/fixtures/junit-with-message.xml
Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="1" disabled="0" errors="0" time="0.001" name="Failure">
<testsuite name="Test" tests="6" failures="1" disabled="0" errors="0" time="0.001">
<testcase name="Test" status="run" time="0" classname="Fails">
<failure message="error" type=""><![CDATA[error.cpp:01
Expected: true
Which is: false >]]></failure>
</testcase>
</testsuite>
</testsuites>
@@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="0" skipped="0" tests="4" time="0.3" timestamp="2026-01-01T16:36:10">
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingBasics]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingIndividual]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testRenderingComponents]"/>
|
||||
<testcase classname="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]" name="Kdyby\BootstrapFormRenderer\BootstrapRenderer. | KdybyTests/BootstrapFormRenderer/BootstrapRendererTest.phpt [method=testMultipleFormsInTemplate]"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
83
__tests__/fixtures/nette-tester/tester-v1.7-report.xml
Normal file
@@ -0,0 +1,83 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite errors="1" skipped="3" tests="65" time="2.1" timestamp="2026-01-01T16:50:52">
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.php7.phpt" name="tests/Framework/Dumper.toPhp.php7.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/Collector.start.phpt" name="tests/CodeCoverage/Collector.start.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines.phpt" name="tests/CodeCoverage/PhpParser.parse.lines.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.methods.phpt" name="tests/CodeCoverage/PhpParser.parse.methods.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/CloverXMLGenerator.phpt" name="tests/CodeCoverage/CloverXMLGenerator.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.edge.phpt" name="tests/CodeCoverage/PhpParser.parse.edge.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt" name="tests/CodeCoverage/PhpParser.parse.lines-of-code.phpt"/>
|
||||
<testcase classname="tests/CodeCoverage/PhpParser.parse.namespaces.phpt" name="tests/CodeCoverage/PhpParser.parse.namespaces.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.contains.phpt" name="tests/Framework/Assert.contains.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.count.phpt" name="tests/Framework/Assert.count.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.phpt" name="tests/Framework/Assert.equal.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testSimple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]" name="tests/Framework/Assert.equal.recursive.phpt [method=testMultiple]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]" name="tests/Framework/Assert.equal.recursive.phpt [method=testDeep]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testCross]" name="tests/Framework/Assert.equal.recursive.phpt [method=testCross]"/>
|
||||
<testcase classname="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]" name="tests/Framework/Assert.equal.recursive.phpt [method=testThirdParty]"/>
|
||||
<testcase classname="tests/Framework/Assert.error.phpt" name="tests/Framework/Assert.error.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.exception.phpt" name="tests/Framework/Assert.exception.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.false.phpt" name="tests/Framework/Assert.false.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.phpt" name="tests/Framework/Assert.match.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.match.regexp.phpt" name="tests/Framework/Assert.match.regexp.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.nan.phpt" name="tests/Framework/Assert.nan.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.noError.phpt" name="tests/Framework/Assert.noError.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.same.phpt" name="tests/Framework/Assert.same.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.null.phpt" name="tests/Framework/Assert.null.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.true.phpt" name="tests/Framework/Assert.true.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.truthy.phpt" name="tests/Framework/Assert.truthy.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.load.phpt" name="tests/Framework/DataProvider.load.phpt"/>
|
||||
<testcase classname="tests/Framework/Assert.type.phpt" name="tests/Framework/Assert.type.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.parseAnnotation.phpt" name="tests/Framework/DataProvider.parseAnnotation.phpt"/>
|
||||
<testcase classname="tests/Framework/DataProvider.testQuery.phpt" name="tests/Framework/DataProvider.testQuery.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.css2Xpath.phpt" name="tests/Framework/DomQuery.css2Xpath.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromHtml.phpt" name="tests/Framework/DomQuery.fromHtml.phpt"/>
|
||||
<testcase classname="tests/Framework/DomQuery.fromXml.phpt" name="tests/Framework/DomQuery.fromXml.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.dumpException.phpt" name="tests/Framework/Dumper.dumpException.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.color.phpt" name="tests/Framework/Dumper.color.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toLine.phpt" name="tests/Framework/Dumper.toLine.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.recursion.phpt" name="tests/Framework/Dumper.toPhp.recursion.phpt"/>
|
||||
<testcase classname="tests/Framework/Dumper.toPhp.phpt" name="tests/Framework/Dumper.toPhp.phpt"/>
|
||||
<testcase classname="tests/Framework/FileMock.phpt" name="tests/Framework/FileMock.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.escapeArg.phpt" name="tests/Framework/Helpers.escapeArg.phpt"/>
|
||||
<testcase classname="tests/Framework/Helpers.parseDocComment.phpt" name="tests/Framework/Helpers.parseDocComment.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.phpt" name="tests/Framework/TestCase.annotationThrows.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt" name="tests/Framework/TestCase.annotationThrows.setUp.tearDown.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.annotationThrows.syntax.phpt" name="tests/Framework/TestCase.annotationThrows.syntax.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.basic.phpt" name="tests/Framework/TestCase.basic.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.generator.phpt" name="tests/Framework/TestCase.dataProvider.generator.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.dataProvider.phpt" name="tests/Framework/TestCase.dataProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidMethods.phpt" name="tests/Framework/TestCase.invalidMethods.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.invalidProvider.phpt" name="tests/Framework/TestCase.invalidProvider.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.error.phpt" name="tests/Framework/TestCase.order.error.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.errorMuted.phpt" name="tests/Framework/TestCase.order.errorMuted.phpt"/>
|
||||
<testcase classname="tests/Framework/TestCase.order.phpt" name="tests/Framework/TestCase.order.phpt"/>
|
||||
<testcase classname="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt" name="Prevent loop in error handling. The #268 regression. | tests/Framework/TestCase.ownErrorHandler.phpt"/>
|
||||
<testcase classname="tests/Runner/CommandLine.phpt" name="tests/Runner/CommandLine.phpt"/>
|
||||
<testcase classname="tests/Runner/HhvmPhpInterpreter.phpt" name="tests/Runner/HhvmPhpInterpreter.phpt">
|
||||
<skipped/>
|
||||
</testcase>
|
||||
<testcase classname="tests/Runner/Runner.find-tests.phpt" name="tests/Runner/Runner.find-tests.phpt"/>
|
||||
<testcase classname="tests/Runner/Job.phpt" name="tests/Runner/Job.phpt"/>
|
||||
<testcase classname="tests/Runner/ZendPhpExecutable.phpt" name="tests/Runner/ZendPhpExecutable.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple.phpt" name="tests/Runner/Runner.multiple.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.edge.phpt" name="tests/Runner/Runner.edge.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.stop-on-fail.phpt" name="tests/Runner/Runner.stop-on-fail.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.multiple-fails.phpt" name="tests/Runner/Runner.multiple-fails.phpt">
|
||||
<failure message="Failed: '... in /Users/izso/Developer/nette/tester/tests/Runner/multiple-fails/...' should match
|
||||
... '..., unexpected end of file in %a%testcase-syntax-error.phptx on line ...'
|
||||
|
||||
diff '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.expected' '/Users/izso/Developer/nette/tester/tests/Runner/output/Runner.multiple-fails.actual'
|
||||
|
||||
in tests/Runner/Runner.multiple-fails.phpt(78) Tester\Assert::match()"/>
|
||||
</testcase>
|
||||
<testcase classname="tests/RunnerOutput/JUnitPrinter.phpt" name="tests/RunnerOutput/JUnitPrinter.phpt"/>
|
||||
<testcase classname="tests/Runner/Runner.annotations.phpt" name="tests/Runner/Runner.annotations.phpt"/>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
23
__tests__/fixtures/phpunit/phpunit-paths.xml
Normal file
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
<testsuite name="SampleSuite" tests="6" failures="6" time="0.006">
<testcase name="testFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Fake.php" line="42" time="0.001">
<failure type="Exception" message="Boom">/home/runner/work/repo/src/Fake.php:42</failure>
</testcase>
<testcase name="testStringFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Other.php" line="10" time="0.001">
<failure>/home/runner/work/repo/src/Other.php:10</failure>
</testcase>
<testcase name="testParenFailure" classname="SampleSuite" file="/home/runner/work/repo/src/Paren.php" line="123" time="0.001">
<failure>at /home/runner/work/repo/src/Paren.php(123)</failure>
</testcase>
<testcase name="testWindowsFailure" classname="SampleSuite" file="C:\repo\src\Win.php" line="77" time="0.001">
<failure>C:\repo\src\Win.php:77</failure>
</testcase>
<testcase name="testWindowsParenFailure" classname="SampleSuite" file="C:\repo\src\WinParen.php" line="88" time="0.001">
<failure>at C:\repo\src\WinParen.php(88)</failure>
</testcase>
<testcase name="testPhptFailure" classname="SampleSuite" file="/home/runner/work/repo/tests/Sample.phpt" line="12" time="0.001">
<failure>/home/runner/work/repo/tests/Sample.phpt:12</failure>
</testcase>
</testsuite>
</testsuites>
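The phpunit-paths.xml fixture above exercises the different ways a PHPUnit failure can reference a source location: Unix path:line, Windows drive-letter paths, and the path(line) form. A minimal sketch of extracting file and line from such text, using a hypothetical extractSourceRef helper and an illustrative regex that may differ from what this action actually uses:

```ts
// Minimal sketch, assuming a hypothetical extractSourceRef helper; the regex below is
// illustrative and not necessarily the pattern this action uses for PHPUnit failures.
interface SourceRef {
  file: string
  line: number
}

function extractSourceRef(failureText: string): SourceRef | undefined {
  // Group 1: the path (optionally prefixed with a Windows drive letter),
  // group 2 or 3: the line number in ":NN" or "(NN)" form.
  const match = failureText.match(/((?:[A-Za-z]:)?[^():\s]+(?:\.php|\.phpt))(?::(\d+)|\((\d+)\))/)
  if (!match) return undefined
  return {file: match[1], line: parseInt(match[2] ?? match[3], 10)}
}

// Examples taken from the fixture above:
// extractSourceRef('/home/runner/work/repo/src/Fake.php:42') -> file: .../Fake.php, line: 42
// extractSourceRef('at C:\\repo\\src\\WinParen.php(88)')     -> file: C:\repo\src\WinParen.php, line: 88
```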
79
__tests__/fixtures/phpunit/phpunit.xml
Normal file
@@ -0,0 +1,79 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testsuites>
|
||||
<testsuite name="CLI Arguments" tests="12" assertions="12" errors="0" failures="2" skipped="0" time="0.140397">
|
||||
<testcase name="targeting-traits-with-coversclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt" assertions="1" time="0.068151">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-coversclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[CoversClass] is deprecated, please refactor your test to use #[CoversTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithCoversClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-coversclass-attribute-is-deprecated.phpt:28
|
||||
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
|
||||
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
|
||||
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
|
||||
</testcase>
|
||||
<testcase name="targeting-traits-with-usesclass-attribute-is-deprecated.phpt" file="/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt" assertions="1" time="0.064268">
|
||||
<failure type="PHPUnit\Framework\PhptAssertionFailedError">targeting-traits-with-usesclass-attribute-is-deprecated.phptFailed asserting that string matches format description.
|
||||
--- Expected
|
||||
+++ Actual
|
||||
@@ @@
|
||||
PHPUnit Started (PHPUnit 11.2-g0c2333363 using PHP 8.2.17 (cli) on Linux)
|
||||
Test Runner Configured
|
||||
Test Suite Loaded (1 test)
|
||||
+Test Runner Triggered Warning (No code coverage driver available)
|
||||
Event Facade Sealed
|
||||
Test Runner Started
|
||||
Test Suite Sorted
|
||||
@@ @@
|
||||
Test Preparation Started (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Prepared (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Passed (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
-Test Runner Triggered Deprecation (Targeting a trait such as PHPUnit\TestFixture\CoveredTrait with #[UsesClass] is deprecated, please refactor your test to use #[UsesTrait] instead.)
|
||||
Test Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest::testSomething)
|
||||
Test Suite Finished (PHPUnit\DeprecatedAnnotationsTestFixture\TraitTargetedWithUsesClassTest, 1 test)
|
||||
Test Runner Execution Finished
|
||||
Test Runner Finished
|
||||
-PHPUnit Finished (Shell Exit Code: 0)
|
||||
+PHPUnit Finished (Shell Exit Code: 1)
|
||||
|
||||
/home/matteo/OSS/phpunit/tests/end-to-end/metadata/targeting-traits-with-usesclass-attribute-is-deprecated.phpt:28
/home/matteo/OSS/phpunit/src/Framework/TestSuite.php:369
/home/matteo/OSS/phpunit/src/TextUI/TestRunner.php:62
/home/matteo/OSS/phpunit/src/TextUI/Application.php:200</failure>
</testcase>
<testsuite name="PHPUnit\Event\CollectingDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" tests="2" assertions="2" errors="0" failures="0" skipped="0" time="0.004256">
<testcase name="testHasNoCollectedEventsWhenFlushedImmediatelyAfterCreation" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="20" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.001441"/>
<testcase name="testCollectsDispatchedEventsUntilFlushed" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/CollectingDispatcherTest.php" line="27" class="PHPUnit\Event\CollectingDispatcherTest" classname="PHPUnit.Event.CollectingDispatcherTest" assertions="1" time="0.002815"/>
</testsuite>
<testsuite name="PHPUnit\Event\DeferringDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.002928">
<testcase name="testCollectsEventsUntilFlush" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="22" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.001672"/>
<testcase name="testFlushesCollectedEvents" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="35" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000661"/>
<testcase name="testSubscriberCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="53" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000334"/>
<testcase name="testTracerCanBeRegistered" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DeferringDispatcherTest.php" line="69" class="PHPUnit\Event\DeferringDispatcherTest" classname="PHPUnit.Event.DeferringDispatcherTest" assertions="1" time="0.000262"/>
</testsuite>
<testsuite name="PHPUnit\Event\DirectDispatcherTest" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" tests="4" assertions="4" errors="0" failures="0" skipped="0" time="0.000794">
<testcase name="testDispatchesEventToKnownSubscribers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="24" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000170"/>
<testcase name="testDispatchesEventToTracers" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="43" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000248"/>
<testcase name="testRegisterRejectsUnknownSubscriber" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="62" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000257"/>
<testcase name="testDispatchRejectsUnknownEventType" file="/home/matteo/OSS/phpunit/tests/unit/Event/Dispatcher/DirectDispatcherTest.php" line="73" class="PHPUnit\Event\DirectDispatcherTest" classname="PHPUnit.Event.DirectDispatcherTest" assertions="1" time="0.000119"/>
</testsuite>
</testsuite>
</testsuites>
42
__tests__/fixtures/python-xunit-pytest.xml
Normal file
@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<testsuites name="pytest tests">
<testsuite name="pytest" errors="0" failures="2" skipped="2" tests="15" time="0.019"
timestamp="2025-11-15T11:51:49.548396-05:00" hostname="Mac.hsd1.va.comcast.net">
<properties>
<property name="custom_prop" value="custom_val"/>
</properties>
<testcase classname="tests.test_lib" name="test_always_pass" time="0.002"/>
<testcase classname="tests.test_lib" name="test_with_subtests" time="0.005"/>
<testcase classname="tests.test_lib" name="test_parameterized[param1]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_parameterized[param2]" time="0.000"/>
<testcase classname="tests.test_lib" name="test_always_skip" time="0.000">
<skipped type="pytest.skip" message="skipped">/Users/mike/Projects/python-test/tests/test_lib.py:20: skipped
</skipped>
</testcase>
<testcase classname="tests.test_lib" name="test_always_fail" time="0.000">
<failure message="assert False">def test_always_fail():
> assert False
E assert False

tests/test_lib.py:25: AssertionError
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_expected_failure" time="0.000">
<skipped type="pytest.xfail" message=""/>
</testcase>
<testcase classname="tests.test_lib" name="test_error" time="0.000">
<failure message="Exception: error">def test_error():
> raise Exception("error")
E Exception: error

tests/test_lib.py:32: Exception
</failure>
</testcase>
<testcase classname="tests.test_lib" name="test_with_record_property" time="0.000">
<properties>
<property name="example_key" value="1"/>
</properties>
</testcase>
<testcase classname="custom_classname" name="test_with_record_xml_attribute" time="0.000"/>
</testsuite>
</testsuites>
27
__tests__/fixtures/python-xunit-unittest.xml
Normal file
@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="TestAcme-20251114214921" tests="8" file=".py" time="0.001" timestamp="2025-11-14T21:49:22" failures="1" errors="1" skipped="2">
<testcase classname="TestAcme" name="test_always_pass" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="8"/>
<testcase classname="TestAcme" name="test_parameterized_0_param1" time="0.001" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_parameterized_1_param2" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="618"/>
<testcase classname="TestAcme" name="test_with_subtests" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="11"/>
<testcase classname="TestAcme" name="test_always_fail" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="23">
<failure type="AssertionError" message="failed"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 24, in test_always_fail
self.fail("failed")
AssertionError: failed
]]></failure>
</testcase>
<testcase classname="TestAcme" name="test_error" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="30">
<error type="Exception" message="error"><![CDATA[Traceback (most recent call last):
File "/Users/foo/Projects/python-test/tests/test_lib.py", line 31, in test_error
raise Exception("error")
Exception: error
]]></error>
</testcase>
<testcase classname="TestAcme" name="test_always_skip" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="20">
<skipped type="skip" message="skipped"/>
</testcase>
<testcase classname="TestAcme" name="test_expected_failure" time="0.000" timestamp="2025-11-14T21:49:22" file="tests/test_lib.py" line="26">
<skipped type="XFAIL" message="expected failure: (<class 'AssertionError'>, AssertionError('expected failure'), <traceback object at 0x100c125c0>)"/>
</testcase>
</testsuite>
29
__tests__/golang-json.test.ts
Normal file
@@ -0,0 +1,29 @@
import * as fs from 'fs'
import * as path from 'path'

import {GolangJsonParser} from '../src/parsers/golang-json/golang-json-parser'
import {ParseOptions} from '../src/test-parser'
import {getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'

describe('golang-json tests', () => {
  it('report from golang-json test results matches snapshot', async () => {
    const fixturePath = path.join(__dirname, 'fixtures', 'golang-json.json')
    const outputPath = path.join(__dirname, '__outputs__', 'golang-json.md')
    const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
    const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})

    const opts: ParseOptions = {
      parseErrors: true,
      trackedFiles: ['calculator.go', 'calculator_test.go']
    }

    const parser = new GolangJsonParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
})
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {JavaJunitParser} from '../src/parsers/java-junit/java-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('java-junit tests', () => {
|
||||
@@ -73,6 +73,46 @@ describe('java-junit tests', () => {
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from testmo/junitxml basic example matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-basic.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'junit-basic.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from testmo/junitxml complete example matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'junit4-complete.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'junit-complete.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses empty failures in test results', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'java', 'empty_failures.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
@@ -90,4 +130,66 @@ describe('java-junit tests', () => {
|
||||
expect(result.result === 'failed')
|
||||
expect(result.failed === 1)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'junit-with-message.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'junit-with-message.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'java-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JavaJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {JestJunitParser} from '../src/parsers/jest-junit/jest-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('jest-junit tests', () => {
|
||||
@@ -105,4 +105,245 @@ describe('jest-junit tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parsing ESLint report without timing information works - PR #134', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'jest-junit-eslint.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: ['test.js']
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parsing junit report with message succeeds', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'junit-with-message.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'junit-with-message.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: ['test.js']
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
|
||||
it('report can be collapsed when configured', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'always'
|
||||
})
|
||||
// Report should include collapsible details
|
||||
expect(report).toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).toContain('</details>')
|
||||
})
|
||||
|
||||
it('report is not collapsed when configured to never', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'never'
|
||||
})
|
||||
// Report should not include collapsible details
|
||||
expect(report).not.toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).not.toContain('</details>')
|
||||
})
|
||||
|
||||
it('report auto-collapses when all tests pass', async () => {
|
||||
// Test with a fixture that has all passing tests (no failures)
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit-eslint.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify this fixture has no failures
|
||||
expect(result.failed).toBe(0)
|
||||
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'auto'
|
||||
})
|
||||
|
||||
// Should collapse when all tests pass
|
||||
expect(report).toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).toContain('</details>')
|
||||
})
|
||||
|
||||
it('report does not auto-collapse when tests fail', async () => {
|
||||
// Test with a fixture that has failing tests
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify this fixture has failures
|
||||
expect(result.failed).toBeGreaterThan(0)
|
||||
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
collapsed: 'auto'
|
||||
})
|
||||
|
||||
// Should not collapse when there are failures
|
||||
expect(report).not.toContain('<details><summary>Expand for details</summary>')
|
||||
expect(report).not.toContain('</details>')
|
||||
})
|
||||
|
||||
it('report includes the short summary', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const shortSummary = '1 passed, 4 failed and 1 skipped'
|
||||
const report = getReport([result], DEFAULT_OPTIONS, shortSummary)
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^## 1 passed, 4 failed and 1 skipped\n/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title and short summary', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'jest-junit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new JestJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const shortSummary = '1 passed, 4 failed and 1 skipped'
|
||||
const report = getReport(
|
||||
[result],
|
||||
{
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
},
|
||||
shortSummary
|
||||
)
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n## 1 passed, 4 failed and 1 skipped\n/)
|
||||
})
|
||||
})
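For context, a minimal sketch of the collapsing rule the collapsed-option tests above describe, under the assumption that 'always' always wraps the details section, 'never' never does, and 'auto' collapses only when nothing failed (hypothetical helper, not the action's actual implementation):

// Hypothetical sketch: wrap the report body in a <details> block when collapsing applies.
function wrapCollapsible(body: string, collapsed: 'always' | 'never' | 'auto', failed: number): string {
  const collapse = collapsed === 'always' || (collapsed === 'auto' && failed === 0)
  return collapse ? `<details><summary>Expand for details</summary>\n\n${body}\n</details>` : body
}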
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {MochaJsonParser} from '../src/parsers/mocha-json/mocha-json-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('mocha-json tests', () => {
|
||||
@@ -64,4 +64,66 @@ describe('mocha-json tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'mocha-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new MochaJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'mocha-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new MochaJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'mocha-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new MochaJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
347
__tests__/phpunit-junit.test.ts
Normal file
@@ -0,0 +1,347 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {PhpunitJunitParser} from '../src/parsers/phpunit-junit/phpunit-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('phpunit-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from phpunit test results matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses nested test suites correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Should have 4 test suites (3 nested ones plus the parent with direct testcases)
|
||||
expect(result.suites.length).toBe(4)
|
||||
|
||||
// Verify suite names
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\CollectingDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DeferringDispatcherTest')
|
||||
expect(suiteNames).toContain('PHPUnit\\Event\\DirectDispatcherTest')
|
||||
expect(suiteNames).toContain('CLI Arguments')
|
||||
|
||||
// Verify total test count
|
||||
expect(result.tests).toBe(12)
|
||||
expect(result.passed).toBe(10)
|
||||
expect(result.failed).toBe(2)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CLI Arguments suite which has failures
|
||||
const cliSuite = result.suites.find(s => s.name === 'CLI Arguments')
|
||||
expect(cliSuite).toBeDefined()
|
||||
|
||||
// Get the failed tests
|
||||
const failedTests = cliSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify error details are captured
|
||||
for (const test of failedTests) {
|
||||
expect(test.error).toBeDefined()
|
||||
expect(test.error!.details).toContain('Failed asserting that string matches format description')
|
||||
}
|
||||
})
|
||||
|
||||
it('maps absolute paths to tracked files for annotations', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'phpunit', 'phpunit-paths.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: [
|
||||
'src/Fake.php',
|
||||
'src/Other.php',
|
||||
'src/Paren.php',
|
||||
'src/Win.php',
|
||||
'src/WinParen.php',
|
||||
'tests/Sample.phpt'
|
||||
]
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
const suite = result.suites.find(s => s.name === 'SampleSuite')
|
||||
expect(suite).toBeDefined()
|
||||
|
||||
const tests = suite!.groups.flatMap(g => g.tests)
|
||||
const fileFailure = tests.find(t => t.name === 'testFailure')
|
||||
expect(fileFailure).toBeDefined()
|
||||
expect(fileFailure!.error).toBeDefined()
|
||||
expect(fileFailure!.error!.path).toBe('src/Fake.php')
|
||||
expect(fileFailure!.error!.line).toBe(42)
|
||||
|
||||
const stringFailure = tests.find(t => t.name === 'testStringFailure')
|
||||
expect(stringFailure).toBeDefined()
|
||||
expect(stringFailure!.error).toBeDefined()
|
||||
expect(stringFailure!.error!.path).toBe('src/Other.php')
|
||||
expect(stringFailure!.error!.line).toBe(10)
|
||||
|
||||
const parenFailure = tests.find(t => t.name === 'testParenFailure')
|
||||
expect(parenFailure).toBeDefined()
|
||||
expect(parenFailure!.error).toBeDefined()
|
||||
expect(parenFailure!.error!.path).toBe('src/Paren.php')
|
||||
expect(parenFailure!.error!.line).toBe(123)
|
||||
|
||||
const windowsFailure = tests.find(t => t.name === 'testWindowsFailure')
|
||||
expect(windowsFailure).toBeDefined()
|
||||
expect(windowsFailure!.error).toBeDefined()
|
||||
expect(windowsFailure!.error!.path).toBe('src/Win.php')
|
||||
expect(windowsFailure!.error!.line).toBe(77)
|
||||
|
||||
const windowsParenFailure = tests.find(t => t.name === 'testWindowsParenFailure')
|
||||
expect(windowsParenFailure).toBeDefined()
|
||||
expect(windowsParenFailure!.error).toBeDefined()
|
||||
expect(windowsParenFailure!.error!.path).toBe('src/WinParen.php')
|
||||
expect(windowsParenFailure!.error!.line).toBe(88)
|
||||
|
||||
const phptFailure = tests.find(t => t.name === 'testPhptFailure')
|
||||
expect(phptFailure).toBeDefined()
|
||||
expect(phptFailure!.error).toBeDefined()
|
||||
expect(phptFailure!.error!.path).toBe('tests/Sample.phpt')
|
||||
expect(phptFailure!.error!.line).toBe(12)
|
||||
})
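For reference, a minimal sketch of the suffix-matching idea these path-mapping assertions exercise: an absolute path (Unix or Windows style) reported by PHPUnit is matched against the repository-relative tracked files (hypothetical helper, not the parser's actual code):

// Hypothetical sketch: return the tracked file whose path is a suffix of the reported absolute path.
function mapToTrackedFile(absolutePath: string, trackedFiles: string[]): string | undefined {
  const normalized = absolutePath.replace(/\\/g, '/') // normalize Windows separators
  return trackedFiles.find(tracked => normalized.endsWith('/' + tracked))
}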
|
||||
|
||||
it('parses junit-basic.xml with nested suites and failure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts
|
||||
expect(result.tests).toBe(9)
|
||||
expect(result.passed).toBe(8)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify suites - should have Tests.Registration, Tests.Authentication.Login, and Tests.Authentication
|
||||
expect(result.suites.length).toBe(3)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('Tests.Registration')
|
||||
expect(suiteNames).toContain('Tests.Authentication.Login')
|
||||
expect(suiteNames).toContain('Tests.Authentication')
|
||||
|
||||
// Verify the Registration suite has 3 tests
|
||||
const registrationSuite = result.suites.find(s => s.name === 'Tests.Registration')
|
||||
expect(registrationSuite).toBeDefined()
|
||||
const registrationTests = registrationSuite!.groups.flatMap(g => g.tests)
|
||||
expect(registrationTests.length).toBe(3)
|
||||
|
||||
// Verify the Authentication suite has 3 direct tests (not counting nested suite)
|
||||
const authSuite = result.suites.find(s => s.name === 'Tests.Authentication')
|
||||
expect(authSuite).toBeDefined()
|
||||
const authTests = authSuite!.groups.flatMap(g => g.tests)
|
||||
expect(authTests.length).toBe(3)
|
||||
|
||||
// Verify the Login nested suite has 3 tests
|
||||
const loginSuite = result.suites.find(s => s.name === 'Tests.Authentication.Login')
|
||||
expect(loginSuite).toBeDefined()
|
||||
const loginTests = loginSuite!.groups.flatMap(g => g.tests)
|
||||
expect(loginTests.length).toBe(3)
|
||||
|
||||
// Verify failure is captured
|
||||
const failedTest = authTests.find(t => t.name === 'testCase9')
|
||||
expect(failedTest).toBeDefined()
|
||||
expect(failedTest!.result).toBe('failed')
|
||||
expect(failedTest!.error).toBeDefined()
|
||||
expect(failedTest!.error!.message).toBe('AssertionError: Assertion error message')
|
||||
})
|
||||
|
||||
it('parses phpcheckstyle-phpunit.xml with deeply nested suites', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from the XML: tests="30", failures="2"
|
||||
expect(result.tests).toBe(30)
|
||||
expect(result.passed).toBe(28)
|
||||
expect(result.failed).toBe(2)
|
||||
expect(result.result).toBe('failed')
|
||||
|
||||
// Verify the number of test suites extracted (leaf suites with testcases)
|
||||
// CommentsTest, DeprecationTest, GoodTest, IndentationTest, MetricsTest,
|
||||
// NamingTest, OptimizationTest, OtherTest, PHPTagsTest, ProhibitedTest,
|
||||
// StrictCompareTest, UnusedTest = 12 suites
|
||||
expect(result.suites.length).toBe(12)
|
||||
|
||||
const suiteNames = result.suites.map(s => s.name)
|
||||
expect(suiteNames).toContain('CommentsTest')
|
||||
expect(suiteNames).toContain('GoodTest')
|
||||
expect(suiteNames).toContain('IndentationTest')
|
||||
expect(suiteNames).toContain('OtherTest')
|
||||
})
|
||||
|
||||
it('extracts test data from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the CommentsTest suite
|
||||
const commentsSuite = result.suites.find(s => s.name === 'CommentsTest')
|
||||
expect(commentsSuite).toBeDefined()
|
||||
|
||||
// Verify tests are extracted correctly
|
||||
const tests = commentsSuite!.groups.flatMap(g => g.tests)
|
||||
expect(tests.length).toBe(3)
|
||||
|
||||
const testGoodDoc = tests.find(t => t.name === 'testGoodDoc')
|
||||
expect(testGoodDoc).toBeDefined()
|
||||
expect(testGoodDoc!.result).toBe('success')
|
||||
})
|
||||
|
||||
it('captures failure details from phpcheckstyle-phpunit.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the OtherTest suite which has failures
|
||||
const otherSuite = result.suites.find(s => s.name === 'OtherTest')
|
||||
expect(otherSuite).toBeDefined()
|
||||
|
||||
const failedTests = otherSuite!.groups.flatMap(g => g.tests).filter(t => t.result === 'failed')
|
||||
expect(failedTests.length).toBe(2)
|
||||
|
||||
// Verify failure details
|
||||
const testOther = failedTests.find(t => t.name === 'testOther')
|
||||
expect(testOther).toBeDefined()
|
||||
expect(testOther!.error).toBeDefined()
|
||||
expect(testOther!.error!.details).toContain('We expect 20 warnings')
|
||||
expect(testOther!.error!.details).toContain('Failed asserting that 19 matches expected 20')
|
||||
|
||||
const testException = failedTests.find(t => t.name === 'testException')
|
||||
expect(testException).toBeDefined()
|
||||
expect(testException!.error).toBeDefined()
|
||||
expect(testException!.error!.details).toContain('We expect 1 error')
|
||||
})
|
||||
|
||||
it('report from junit-basic.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'junit-basic.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-junit-basic-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report from phpcheckstyle-phpunit.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'external', 'phpunit', 'phpcheckstyle-phpunit.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'phpunit-phpcheckstyle-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new PhpunitJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
})
|
||||
93
__tests__/python-xunit.test.ts
Normal file
@@ -0,0 +1,93 @@
import * as fs from 'fs'
import * as path from 'path'

import {PythonXunitParser} from '../src/parsers/python-xunit/python-xunit-parser'
import {ParseOptions} from '../src/test-parser'
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
import {normalizeFilePath} from '../src/utils/path-utils'

const defaultOpts: ParseOptions = {
  parseErrors: true,
  trackedFiles: []
}

describe('python-xunit unittest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-unittest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-unittest.md')

  it('report from python test results matches snapshot', async () => {
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }

    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })

  it('report does not include a title by default', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result])
    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it.each([
    ['empty string', ''],
    ['space', ' '],
    ['tab', '\t'],
    ['newline', '\n']
  ])('report does not include a title when configured value is %s', async (_, reportTitle) => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle
    })
    // Report should have the badge as the first line
    expect(report).toMatch(/^!\[Tests failed]/)
  })

  it('report includes a custom report title', async () => {
    const parser = new PythonXunitParser(defaultOpts)
    const result = await parser.parse(filePath, fileContent)
    const report = getReport([result], {
      ...DEFAULT_OPTIONS,
      reportTitle: 'My Custom Title'
    })
    // Report should have the title as the first line
    expect(report).toMatch(/^# My Custom Title\n/)
  })
})

describe('python-xunit pytest report', () => {
  const fixturePath = path.join(__dirname, 'fixtures', 'python-xunit-pytest.xml')
  const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
  const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
  const outputPath = path.join(__dirname, '__outputs__', 'python-xunit-pytest.md')

  it('report from python test results matches snapshot', async () => {
    const trackedFiles = ['tests/test_lib.py']
    const opts: ParseOptions = {
      ...defaultOpts,
      trackedFiles
    }

    const parser = new PythonXunitParser(opts)
    const result = await parser.parse(filePath, fileContent)
    expect(result).toMatchSnapshot()

    const report = getReport([result])
    fs.mkdirSync(path.dirname(outputPath), {recursive: true})
    fs.writeFileSync(outputPath, report)
  })
})
120
__tests__/report/get-report.test.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import {getBadge, DEFAULT_OPTIONS, ReportOptions} from '../../src/report/get-report'
|
||||
|
||||
describe('getBadge', () => {
|
||||
describe('URI encoding with special characters', () => {
|
||||
it('generates correct URI with simple badge title', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'tests'
|
||||
}
|
||||
const badge = getBadge(5, 0, 1, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with single hyphen', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'unit-tests'
|
||||
}
|
||||
const badge = getBadge(3, 0, 0, options)
|
||||
// The hyphen in the badge title should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with multiple hyphens', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'integration-api-tests'
|
||||
}
|
||||
const badge = getBadge(10, 0, 0, options)
|
||||
// All hyphens in the title should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with multiple underscores', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'my_integration_test'
|
||||
}
|
||||
const badge = getBadge(10, 0, 0, options)
|
||||
// All underscores in the title should be encoded as __
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with version format containing hyphen', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'MariaDb 12.0-ubi database tests'
|
||||
}
|
||||
const badge = getBadge(1, 0, 0, options)
|
||||
// The hyphen in "12.0-ubi" should be encoded as --
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('handles badge title with dots and hyphens', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'v1.2.3-beta-test'
|
||||
}
|
||||
const badge = getBadge(4, 1, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('preserves structural hyphens between label and message', () => {
|
||||
const options: ReportOptions = {
|
||||
...DEFAULT_OPTIONS,
|
||||
badgeTitle: 'test-suite'
|
||||
}
|
||||
const badge = getBadge(2, 3, 1, options)
|
||||
// The URI should have literal hyphens separating title-message-color
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('generates test outcome as color name for imgshields', () => {
|
||||
it('uses success color when all tests pass', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 0, 0, options)
|
||||
expect(badge).toContain('-success)')
|
||||
})
|
||||
|
||||
it('uses critical color when tests fail', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 0, options)
|
||||
expect(badge).toContain('-critical)')
|
||||
})
|
||||
|
||||
it('uses yellow color when no tests found', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(0, 0, 0, options)
|
||||
expect(badge).toContain('-yellow)')
|
||||
})
|
||||
})
|
||||
|
||||
describe('badge message composition', () => {
|
||||
it('includes only passed count when no failures or skips', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 0, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('includes passed and failed counts', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('includes passed, failed and skipped counts', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(5, 2, 1, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
|
||||
it('uses "none" message when no tests', () => {
|
||||
const options: ReportOptions = {...DEFAULT_OPTIONS}
|
||||
const badge = getBadge(0, 0, 0, options)
|
||||
expect(badge).toBe('')
|
||||
})
|
||||
})
|
||||
})
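For reference, a minimal sketch of the shields.io escaping rule the assertions above rely on: literal hyphens and underscores inside the badge label or message are doubled so they are not read as the structural label-message-color separators (hypothetical helper, not the action's actual code):

// Hypothetical sketch: escape user-supplied text for an img.shields.io/badge/<label>-<message>-<color> URL.
function escapeBadgeText(text: string): string {
  return text.replace(/-/g, '--').replace(/_/g, '__').replace(/ /g, '%20')
}

// escapeBadgeText('unit-tests') === 'unit--tests'
// escapeBadgeText('my_integration_test') === 'my__integration__test'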
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {RspecJsonParser} from '../src/parsers/rspec-json/rspec-json-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('rspec-json tests', () => {
|
||||
@@ -42,4 +42,66 @@ describe('rspec-json tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'rspec-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new RspecJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'rspec-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new RspecJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'rspec-json.json')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new RspecJsonParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path'
|
||||
|
||||
import {SwiftXunitParser} from '../src/parsers/swift-xunit/swift-xunit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {DEFAULT_OPTIONS, getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('swift-xunit tests', () => {
|
||||
@@ -27,4 +27,66 @@ describe('swift-xunit tests', () => {
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('report does not include a title by default', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'swift-xunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new SwiftXunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result])
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it.each([
|
||||
['empty string', ''],
|
||||
['space', ' '],
|
||||
['tab', '\t'],
|
||||
['newline', '\n']
|
||||
])('report does not include a title when configured value is %s', async (_, reportTitle) => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'swift-xunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new SwiftXunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle
|
||||
})
|
||||
// Report should have the badge as the first line
|
||||
expect(report).toMatch(/^!\[Tests failed]/)
|
||||
})
|
||||
|
||||
it('report includes a custom report title', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'swift-xunit.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new SwiftXunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
const report = getReport([result], {
|
||||
...DEFAULT_OPTIONS,
|
||||
reportTitle: 'My Custom Title'
|
||||
})
|
||||
// Report should have the title as the first line
|
||||
expect(report).toMatch(/^# My Custom Title\n/)
|
||||
})
|
||||
})
|
||||
|
||||
224
__tests__/tester-junit.test.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
|
||||
import {NetteTesterJunitParser} from '../src/parsers/tester-junit/tester-junit-parser'
|
||||
import {ParseOptions} from '../src/test-parser'
|
||||
import {getReport} from '../src/report/get-report'
|
||||
import {normalizeFilePath} from '../src/utils/path-utils'
|
||||
|
||||
describe('tester-junit tests', () => {
|
||||
it('produces empty test run result when there are no test cases', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'empty', 'phpunit-empty.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result.tests).toBe(0)
|
||||
expect(result.result).toBe('success')
|
||||
})
|
||||
|
||||
it('report from tester-v1.7-report.xml matches snapshot', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-v1.7-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('parses tester-v1.7-report.xml correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts from XML: tests="65" errors="1" skipped="3"
|
||||
expect(result.tests).toBe(65)
|
||||
expect(result.failed).toBe(1)
|
||||
expect(result.skipped).toBe(3)
|
||||
expect(result.passed).toBe(61)
|
||||
|
||||
// Verify suite name uses file name
|
||||
expect(result.suites.length).toBe(1)
|
||||
expect(result.suites[0].name).toBe('tester-v1.7-report.xml')
|
||||
})
|
||||
|
||||
it('groups tests by directory structure', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Get all group names
|
||||
const groupNames = result.suites[0].groups.map(g => g.name)
|
||||
|
||||
// Verify expected directory groups exist
|
||||
expect(groupNames).toContain('tests/Framework')
|
||||
expect(groupNames).toContain('tests/CodeCoverage')
|
||||
expect(groupNames).toContain('tests/Runner')
|
||||
expect(groupNames).toContain('tests/RunnerOutput')
|
||||
})
|
||||
|
||||
it('parses test names with method suffixes correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the Framework group which has tests with method suffixes
|
||||
const frameworkGroup = result.suites[0].groups.find(g => g.name === 'tests/Framework')
|
||||
expect(frameworkGroup).toBeDefined()
|
||||
|
||||
// Find tests with method suffixes
|
||||
const testWithMethod = frameworkGroup!.tests.find(t => t.name.includes('::testSimple'))
|
||||
expect(testWithMethod).toBeDefined()
|
||||
expect(testWithMethod!.name).toBe('Assert.equal.recursive.phpt::testSimple')
|
||||
})
|
||||
|
||||
it('parses complex test names from BootstrapFormRenderer-report.xml', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'BootstrapFormRenderer-report.xml')
|
||||
const outputPath = path.join(__dirname, '__outputs__', 'tester-bootstrap-test-results.md')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Verify test counts: 4 tests, all passed
|
||||
expect(result.tests).toBe(4)
|
||||
expect(result.passed).toBe(4)
|
||||
expect(result.failed).toBe(0)
|
||||
expect(result.skipped).toBe(0)
|
||||
|
||||
// Verify suite name
|
||||
expect(result.suites[0].name).toBe('BootstrapFormRenderer-report.xml')
|
||||
|
||||
// All tests should have method names
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
expect(allTests.every(t => t.name.includes('::'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingBasics'))).toBe(true)
|
||||
expect(allTests.some(t => t.name.includes('::testRenderingIndividual'))).toBe(true)
|
||||
|
||||
expect(result).toMatchSnapshot()
|
||||
|
||||
const report = getReport([result])
|
||||
fs.mkdirSync(path.dirname(outputPath), {recursive: true})
|
||||
fs.writeFileSync(outputPath, report)
|
||||
})
|
||||
|
||||
it('extracts error details from failures', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find the failed test
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const failedTests = allTests.filter(t => t.result === 'failed')
|
||||
|
||||
expect(failedTests.length).toBe(1)
|
||||
|
||||
// Verify error details are captured
|
||||
const failedTest = failedTests[0]
|
||||
expect(failedTest.error).toBeDefined()
|
||||
expect(failedTest.error!.details).toContain('Failed:')
|
||||
expect(failedTest.error!.details).toContain('multiple-fails')
|
||||
})
|
||||
|
||||
it('correctly identifies skipped tests', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find skipped tests
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
const skippedTests = allTests.filter(t => t.result === 'skipped')
|
||||
|
||||
expect(skippedTests.length).toBe(3)
|
||||
|
||||
// Verify some known skipped tests
|
||||
expect(skippedTests.some(t => t.name.includes('Dumper.toPhp.php7.phpt'))).toBe(true)
|
||||
expect(skippedTests.some(t => t.name.includes('Collector.start.phpt'))).toBe(true)
|
||||
})
|
||||
|
||||
it('parses test with description prefix correctly', async () => {
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'nette-tester', 'tester-v1.7-report.xml')
|
||||
const filePath = normalizeFilePath(path.relative(__dirname, fixturePath))
|
||||
const fileContent = fs.readFileSync(fixturePath, {encoding: 'utf8'})
|
||||
|
||||
const opts: ParseOptions = {
|
||||
parseErrors: true,
|
||||
trackedFiles: []
|
||||
}
|
||||
|
||||
const parser = new NetteTesterJunitParser(opts)
|
||||
const result = await parser.parse(filePath, fileContent)
|
||||
|
||||
// Find test with description prefix
|
||||
const allTests = result.suites[0].groups.flatMap(g => g.tests)
|
||||
// The test name is generated from the basename, and the description is shown in parentheses
|
||||
const testWithDescription = allTests.find(t => t.name.includes('Prevent loop'))
|
||||
|
||||
expect(testWithDescription).toBeDefined()
|
||||
expect(testWithDescription!.name).toContain('Prevent loop')
|
||||
expect(testWithDescription!.name).toContain('TestCase.ownErrorHandler.phpt')
|
||||
})
|
||||
})
|
||||
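The tests above drive the new NetteTesterJunitParser directly against Nette Tester JUnit output. For context, a minimal workflow sketch of how the matching `tester-junit` format could be consumed from CI is shown below; the step names, the report file name, and the `tester -o junit` invocation are illustrative assumptions, and the `name`/`path` inputs come from the action's README rather than from this diff. Only `reporter: tester-junit` is taken from the change itself.

```yaml
# Hypothetical usage sketch for the new tester-junit format; paths and commands are not from this repo.
- name: Run Nette Tester
  run: vendor/bin/tester -o junit tests > tester-junit.xml   # JUnit output flag depends on the Tester version
  continue-on-error: true
- name: Publish test report
  uses: dorny/test-reporter@v2     # assumes the v2 tag of this action
  with:
    name: Nette Tester results     # check/summary name (documented input, not shown in this diff)
    path: tester-junit.xml         # report file glob (documented input, not shown in this diff)
    reporter: tester-junit         # format added in this change
```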
@@ -32,6 +32,6 @@ describe('parseNetDuration', () => {
  })

  it('throws when string has invalid format', () => {
    expect(() => parseNetDuration('12:34:56 not a duration')).toThrowError(/^Invalid format/)
    expect(() => parseNetDuration('12:34:56 not a duration')).toThrow(/^Invalid format/)
  })
})
35  action.yml
@@ -1,6 +1,5 @@
name: Test Reporter
description: |
  Shows test results in GitHub UI: .NET (xUnit, NUnit, MSTest), Dart, Flutter, Java (JUnit), JavaScript (JEST, Mocha)
description: Displays test results from popular testing frameworks directly in GitHub
author: Michal Dorner <dorner.michal@gmail.com>
inputs:
  artifact:
@@ -26,11 +25,16 @@ inputs:
    description: |
      Format of test results. Supported options:
      - dart-json
      - dotnet-nunit
      - dotnet-trx
      - flutter-json
      - golang-json
      - java-junit
      - jest-junit
      - mocha-json
      - tester-junit
      - phpunit-junit
      - python-xunit
      - rspec-json
      - swift-xunit
    required: true
@@ -38,14 +42,15 @@ inputs:
    description: |
      Limits which test suites are listed. Supported options:
      - all
      - only-failed
      - failed
      - none
    required: false
    default: 'all'
  list-tests:
    description: |
      Limits which test cases are listed. Supported options:
      - all
      - only-failed
      - failed
      - none
    required: false
    default: 'all'
@@ -66,6 +71,10 @@ inputs:
  working-directory:
    description: Relative path under $GITHUB_WORKSPACE where the repository was checked out
    required: false
  report-title:
    description: Title for the test report summary
    required: false
    default: ''
  only-summary:
    description: |
      Allows you to generate only the summary.
@@ -73,6 +82,24 @@ inputs:
      Detailed listing of test suites and test cases will be skipped.
    default: 'false'
    required: false
  use-actions-summary:
    description: |
      Allows you to generate reports for Actions Summary
      https://github.com/orgs/github/teams/engineering/discussions/871
    default: 'true'
    required: false
  badge-title:
    description: Customize badge title
    required: false
    default: 'tests'
  collapsed:
    description: |
      Controls whether test report details are collapsed or expanded. Supported options:
      - auto: Collapse only if all tests pass (default behavior)
      - always: Always collapse the report details
      - never: Always expand the report details
    required: false
    default: 'auto'
  token:
    description: GitHub Access Token
    required: false
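The inputs added above (`report-title`, `use-actions-summary`, `badge-title`, `collapsed`) control how the report is rendered. A hedged sketch of how they might be combined follows; the `name`, `path`, and `reporter` values are placeholders, while the defaults quoted in the comments are the ones declared in action.yml.

```yaml
# Illustrative combination of the new presentation inputs; non-default values are placeholders.
- uses: dorny/test-reporter@v2       # assumes the v2 tag of this action
  with:
    name: Unit tests
    path: 'reports/**/*.xml'
    reporter: java-junit
    report-title: Nightly unit tests # rendered as the first line of the report (default: '')
    use-actions-summary: 'true'      # write the report to the GitHub Actions summary (default: 'true')
    badge-title: unit-tests          # label used for the generated badge (default: 'tests')
    collapsed: always                # auto | always | never (default: 'auto')
```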
8615  dist/index.js  (generated, vendored)
File diff suppressed because one or more lines are too long
2  dist/index.js.map  (generated, vendored)
File diff suppressed because one or more lines are too long
105  dist/licenses.txt  (generated, vendored)
@@ -378,16 +378,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@vercel/ncc
MIT
Copyright 2018 ZEIT, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

adm-zip
MIT
MIT License
@@ -622,7 +612,7 @@ braces
MIT
The MIT License (MIT)

Copyright (c) 2014-2018, Jon Schlinkert.
Copyright (c) 2014-present, Jon Schlinkert.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -1360,48 +1350,62 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


sax
ISC
The ISC License
BlueOak-1.0.0
# Blue Oak Model License

Copyright (c) Isaac Z. Schlueter and Contributors
Version 1.0.0

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
## Purpose

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
This license gives everyone as much permission to work with
this software as possible, while protecting contributors
from liability.

====
## Acceptance

`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
License, as follows:
In order to receive this license, you must agree to its
rules. The rules of this license are both obligations
under that agreement and conditions to your license.
You must not do anything with this software that triggers
a rule that you cannot or will not follow.

Copyright Mathias Bynens <https://mathiasbynens.be/>
## Copyright

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
Each contributor licenses you to do everything with this
software that would otherwise infringe that contributor's
copyright in it.

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
## Notices

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
You must ensure that everyone who gets a copy of
any part of this software from you, with or without
changes, also gets the text of this license or a link to
<https://blueoakcouncil.org/license/1.0.0>.

## Excuse

If anyone notifies you in writing that you have not
complied with [Notices](#notices), you can keep your
license by taking all practical steps to comply within 30
days after the notice. If you do not do so, your license
ends immediately.

## Patent

Each contributor licenses you to do everything with this
software that would otherwise infringe any patent claims
they can license or become able to license.

## Reliability

No contributor can revoke this license.

## No Liability

***As far as the law allows, this software comes as is,
without any warranty or condition, and no contributor
will be liable to anyone for any damages related to this
software or this license, under any kind of legal claim.***


to-regex-range
@@ -1490,19 +1494,6 @@ Permission to use, copy, modify, and/or distribute this software for any purpose
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


uuid
MIT
The MIT License (MIT)

Copyright (c) 2010-2020 Robert Kieffer and other contributors

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


wrappy
ISC
The ISC License
5963  package-lock.json  (generated)
File diff suppressed because it is too large
68  package.json
@@ -1,6 +1,6 @@
{
  "name": "test-reporter",
  "version": "1.9.1",
  "version": "2.5.0",
  "private": true,
  "description": "Presents test results from popular testing frameworks as Github check run",
  "main": "lib/main.js",
@@ -16,6 +16,10 @@
    "all": "npm run build && npm run format && npm run lint && npm run package && npm test",
    "dart-fixture": "cd \"reports/dart\" && dart test --file-reporter=\"json:../../__tests__/fixtures/dart-json.json\"",
    "dotnet-fixture": "dotnet test reports/dotnet/DotnetTests.XUnitTests --logger \"trx;LogFileName=../../../../__tests__/fixtures/dotnet-trx.trx\"",
    "dotnet-xunitv3-fixture": "dotnet run --project reports/dotnet/DotnetTests.XUnitV3Tests/DotnetTests.XUnitV3Tests.csproj --report-trx --report-trx-filename dotnet-xunitv3.trx --results-directory __tests__/fixtures/",
    "dotnet-nunit-fixture": "nunit.exe reports/dotnet/DotnetTests.NUnitV3Tests/bin/Debug/netcoreapp3.1/DotnetTests.NUnitV3Tests.dll --result=__tests__/fixtures/dotnet-nunit.xml",
    "dotnet-nunit-legacy-fixture": "nunit-console.exe reports/dotnet-nunit-legacy/NUnitLegacy.sln --result=__tests__/fixtures/dotnet-nunit-legacy.xml",
    "golang-json-fixture": "go test -v -json -timeout 5s ./reports/go | tee __tests__/fixtures/golang-json.json",
    "jest-fixture": "cd \"reports/jest\" && npm test",
    "mocha-fixture": "cd \"reports/mocha\" && npm test"
  },
@@ -32,41 +36,41 @@
  "author": "Michal Dorner <dorner.michal@gmail.com>",
  "license": "MIT",
  "dependencies": {
    "@actions/core": "^1.10.1",
    "@actions/core": "^1.11.1",
    "@actions/exec": "^1.1.1",
    "@actions/github": "^6.0.0",
    "adm-zip": "^0.5.12",
    "fast-glob": "^3.3.2",
    "@actions/github": "^6.0.1",
    "adm-zip": "^0.5.16",
    "fast-glob": "^3.3.3",
    "got": "^11.8.6",
    "picomatch": "^4.0.2",
    "picomatch": "^4.0.3",
    "xml2js": "^0.6.2"
  },
  "devDependencies": {
    "@octokit/types": "^12.4.0",
    "@octokit/webhooks": "^12.0.11",
    "@octokit/webhooks-types": "^7.3.1",
    "@types/adm-zip": "^0.5.5",
    "@types/jest": "^29.5.12",
    "@types/node": "^18.19.32",
    "@types/picomatch": "^2.3.3",
    "@octokit/webhooks-types": "^7.6.1",
    "@types/adm-zip": "^0.5.7",
    "@types/jest": "^30.0.0",
    "@types/node": "^20.19.23",
    "@types/picomatch": "^4.0.2",
    "@types/xml2js": "^0.4.14",
    "@typescript-eslint/eslint-plugin": "^7.8.0",
    "@typescript-eslint/parser": "^7.8.0",
    "@vercel/ncc": "^0.38.1",
    "eol-converter-cli": "^1.0.8",
    "eslint": "^8.57.0",
    "eslint-import-resolver-typescript": "^3.6.1",
    "@typescript-eslint/eslint-plugin": "^7.18.0",
    "@typescript-eslint/parser": "^7.18.0",
    "@vercel/ncc": "^0.38.4",
    "eol-converter-cli": "^1.1.0",
    "eslint": "^8.57.1",
    "eslint-import-resolver-typescript": "^3.10.1",
    "eslint-plugin-github": "^4.10.2",
    "eslint-plugin-import": "^2.29.1",
    "eslint-plugin-jest": "^27.9.0",
    "eslint-plugin-prettier": "^5.1.3",
    "jest": "^29.7.0",
    "jest-circus": "^29.7.0",
    "eslint-plugin-import": "^2.32.0",
    "eslint-plugin-jest": "^28.14.0",
    "eslint-plugin-prettier": "^5.5.4",
    "jest": "^30.2.0",
    "jest-junit": "^16.0.0",
    "js-yaml": "^4.1.0",
    "prettier": "^3.2.5",
    "ts-jest": "^29.1.2",
    "typescript": "^5.4.5"
    "js-yaml": "^4.1.1",
    "prettier": "^3.6.2",
    "ts-jest": "^29.4.5",
    "typescript": "^5.9.3"
  },
  "overrides": {
    "sax": "^1.4.3"
  },
  "jest-junit": {
    "suiteName": "jest tests",
@@ -77,5 +81,13 @@
    "suiteNameTemplate": "{filepath}",
    "classNameTemplate": "{classname}",
    "titleTemplate": "{title}"
  },
  "engines": {
    "node": ">=20"
  },
  "markdownlint-cli2": {
    "ignores": [
      "__tests__/**/*"
    ]
  }
}
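With `engines` now requiring Node 20 and the toolchain moved to Jest 30 and TypeScript 5.9, a CI job for this package would plausibly look like the sketch below. The job and step layout is an assumption; only the `npm run all` script and the Node requirement come from the manifest above.

```yaml
# Assumed CI job shape; only "npm run all" and the Node >= 20 requirement are taken from package.json.
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - run: npm ci
      - run: npm run all   # build, format, lint, package and test in one pass
```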
64  reports/dotnet/DotnetTests.NUnitV3Tests/CalculatorTests.cs  (new file)
@@ -0,0 +1,64 @@
using System;
using System.Threading;
using DotnetTests.Unit;
using NUnit.Framework;

namespace DotnetTests.XUnitTests
{
    public class CalculatorTests
    {
        private readonly Calculator _calculator = new Calculator();

        [Test]
        public void Passing_Test()
        {
            Assert.That(_calculator.Sum(1, 1), Is.EqualTo(2));
        }

        [Test(Description = "Some description")]
        public void Passing_Test_With_Description()
        {
            Assert.That(2, Is.EqualTo(2));
        }

        [Test]
        public void Failing_Test()
        {
            Assert.That(_calculator.Sum(1, 1), Is.EqualTo(3));
        }

        [Test]
        public void Exception_In_TargetTest()
        {
            _calculator.Div(1, 0);
        }

        [Test]
        public void Exception_In_Test()
        {
            throw new Exception("Test");
        }

        [Test]
        [CancelAfter(1)]
        public void Timeout_Test()
        {
            Thread.Sleep(100);
        }

        [Test]
        [Ignore("Skipped")]
        public void Skipped_Test()
        {
            throw new Exception("Test");
        }

        [Theory]
        [TestCase(2)]
        [TestCase(3)]
        public void Is_Even_Number(int i)
        {
            Assert.That(i % 2 == 0);
        }
    }
}
@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <DeterministicSourcePaths>true</DeterministicSourcePaths>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="NUnit" Version="4.3.2" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\DotnetTests.Unit\DotnetTests.Unit.csproj" />
  </ItemGroup>

</Project>
@@ -2,6 +2,7 @@

  <PropertyGroup>
    <TargetFramework>netstandard2.0</TargetFramework>
    <DeterministicSourcePaths>true</DeterministicSourcePaths>
  </PropertyGroup>

</Project>

@@ -1,16 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>netcoreapp3.1</TargetFramework>

    <IsPackable>false</IsPackable>
    <TargetFramework>net8.0</TargetFramework>
    <DeterministicSourcePaths>true</DeterministicSourcePaths>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
    <PackageReference Include="xunit" Version="2.4.0" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.4.0" />
    <PackageReference Include="coverlet.collector" Version="1.2.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.1.1" />
  </ItemGroup>

  <ItemGroup>

@@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <OutputType>exe</OutputType>
    <DeterministicSourcePaths>true</DeterministicSourcePaths>
    <UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Testing.Extensions.TrxReport" Version="1.7.3" />
    <PackageReference Include="xunit.v3" Version="2.0.3" />
  </ItemGroup>

</Project>
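The last project above targets xunit.v3 on the Microsoft.Testing.Platform runner with the TrxReport extension, and the `dotnet-xunitv3-fixture` script in package.json shows the exact command used to produce its TRX fixture. A hedged sketch of wiring the same output into the reporter follows; the results directory and step layout are illustrative, and feeding the TRX to the existing `dotnet-trx` format is an assumption based on that fixture script.

```yaml
# Sketch only: mirrors the dotnet-xunitv3-fixture script in package.json; the results path is a placeholder.
- name: Run xUnit v3 tests
  run: >
    dotnet run --project reports/dotnet/DotnetTests.XUnitV3Tests/DotnetTests.XUnitV3Tests.csproj
    --report-trx --report-trx-filename dotnet-xunitv3.trx --results-directory test-results/
  continue-on-error: true
- uses: dorny/test-reporter@v2
  with:
    name: xUnit v3 tests
    path: test-results/dotnet-xunitv3.trx
    reporter: dotnet-trx   # assumed: TRX output is consumed by the existing TRX format
```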
27  reports/dotnet/DotnetTests.XUnitV3Tests/FixtureTests.cs  (new file)
@@ -0,0 +1,27 @@
using System;
using Xunit;

namespace DotnetTests.XUnitV3Tests;

public sealed class Fixture : IDisposable
{
    public void Dispose()
    {
        throw new InvalidOperationException("Failure during fixture disposal");
    }
}

public class FixtureTests(Fixture fixture) : IClassFixture<Fixture>
{
    [Fact]
    public void Passing_Test()
    {
        Assert.NotNull(fixture);
    }

    [Fact]
    public void Failing_Test()
    {
        Assert.Null(fixture);
    }
}
@@ -9,6 +9,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{BCAC3B31
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DotnetTests.XUnitTests", "DotnetTests.XUnitTests\DotnetTests.XUnitTests.csproj", "{F8607EDB-D25D-47AA-8132-38ACA242E845}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.NUnitV3Tests", "DotnetTests.NUnitV3Tests\DotnetTests.NUnitV3Tests.csproj", "{81023ED7-56CB-47E9-86C5-9125A0873C55}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DotnetTests.XUnitV3Tests", "DotnetTests.XUnitV3Tests\DotnetTests.XUnitV3Tests.csproj", "{D35E65DC-62EF-4612-9FF3-66F5600BFB74}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
@@ -23,12 +27,22 @@ Global
        {F8607EDB-D25D-47AA-8132-38ACA242E845}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F8607EDB-D25D-47AA-8132-38ACA242E845}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F8607EDB-D25D-47AA-8132-38ACA242E845}.Release|Any CPU.Build.0 = Release|Any CPU
        {81023ED7-56CB-47E9-86C5-9125A0873C55}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {81023ED7-56CB-47E9-86C5-9125A0873C55}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {81023ED7-56CB-47E9-86C5-9125A0873C55}.Release|Any CPU.Build.0 = Release|Any CPU
        {D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {D35E65DC-62EF-4612-9FF3-66F5600BFB74}.Release|Any CPU.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
    EndGlobalSection
    GlobalSection(NestedProjects) = preSolution
        {F8607EDB-D25D-47AA-8132-38ACA242E845} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
        {81023ED7-56CB-47E9-86C5-9125A0873C55} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
        {D35E65DC-62EF-4612-9FF3-66F5600BFB74} = {BCAC3B31-ADB1-4221-9D5B-182EE868648C}
    EndGlobalSection
    GlobalSection(ExtensibilityGlobals) = postSolution
        SolutionGuid = {6ED5543C-74AA-4B21-8050-943550F3F66E}
20  reports/go/calculator.go  (new file)
@@ -0,0 +1,20 @@
package main

import "errors"

func CalculatorSum(a, b int) int {
    return a + b
}

func CalculatorDivide(a, b int) int {
    return a / b
}

var ErrDivideByZero = errors.New("divide by zero")

func CalculatorSafeDivide(a, b int) (int, error) {
    if b == 0 {
        return 0, ErrDivideByZero
    }
    return a / b, nil
}
82  reports/go/calculator_test.go  (new file)
@@ -0,0 +1,82 @@
package main

import (
    "math/rand"
    "testing"
    "time"
)

func TestPassing(t *testing.T) {
    randomSleep()
    t.Log("pass!")
}

func TestFailing(t *testing.T) {
    randomSleep()
    expected := 3
    actual := CalculatorSum(1, 1)
    if actual != expected {
        t.Fatalf("expected 1+1 = %d, got %d", expected, actual)
    }
}

func TestPanicInsideFunction(t *testing.T) {
    defer catchPanics(t)

    expected := 0
    actual := CalculatorDivide(1, 0)
    if actual != expected {
        t.Fatalf("expected 1/1 = %d, got %d", expected, actual)
    }
}

func TestPanicInsideTest(t *testing.T) {
    defer catchPanics(t)
    panic("bad stuff")
}

// Timeouts cause the entire test process to end - so we can't get good output for these
// func TestTimeout(t *testing.T) {
//     time.Sleep(time.Second * 5)
// }

func TestSkipped(t *testing.T) {
    randomSleep()
    t.Skipf("skipping test")
}

func TestCases(t *testing.T) {
    for _, tc := range []struct {
        name    string
        fn      func(int, int) int
        a, b, c int
    }{
        {"1 + 2 = 3", CalculatorSum, 1, 2, 3},
        {"4 + 7 = 11", CalculatorSum, 4, 7, 11},
        {"2 + 3 = 4", CalculatorSum, 2, 3, 4},

        {"1 / 2 = 1", CalculatorDivide, 1, 2, 1},
        {"9 / 3 = 3", CalculatorDivide, 9, 3, 3},
        {"14 / 7 = 2", CalculatorDivide, 14, 7, 2},
    } {
        t.Run(tc.name, func(t *testing.T) {
            randomSleep()

            c := tc.fn(tc.a, tc.b)
            if c != tc.c {
                t.Fatalf("expected %s, got %d", tc.name, c)
            }
        })
    }
}

func catchPanics(t *testing.T) {
    err := recover()
    if err != nil {
        t.Fatalf("caught panic: %v", err)
    }
}

func randomSleep() {
    time.Sleep(time.Duration(rand.Int63n(int64(time.Second))))
}
3  reports/go/go.mod  (new file)
@@ -0,0 +1,3 @@
module test_reporter_example

go 1.24.2
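The Go module above backs the `golang-json` format, and the `golang-json-fixture` script in package.json shows the exact `go test -json` invocation used to capture its output. A hedged workflow sketch follows; the output file name and step layout are illustrative, while the test command and the `golang-json` reporter value come from this diff.

```yaml
# Sketch only: mirrors the golang-json-fixture script in package.json; the output file name is a placeholder.
- name: Run Go tests
  run: go test -v -json -timeout 5s ./reports/go | tee golang-results.json
  continue-on-error: true
- uses: dorny/test-reporter@v2
  with:
    name: Go tests
    path: golang-results.json
    reporter: golang-json
```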
4826  reports/jest/package-lock.json  (generated)
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff